Skip to content

Commit

Permalink
test: 배포 테스트
Browse files Browse the repository at this point in the history
  • Loading branch information
mangowhoiscloud committed Sep 6, 2024
1 parent 1ce6d6f commit de836c9
Show file tree
Hide file tree
Showing 12 changed files with 209 additions and 2 deletions.
55 changes: 55 additions & 0 deletions .github/workflows/github-actions.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
name: CI/CD

on:
  push:
    branches:
      - dev
      - main

  workflow_dispatch:

jobs:
  AI-CI:
    runs-on: ubuntu-latest

    steps:
      # FIX: actions/checkout does not accept a `python-version` input — the
      # original `with: python-version: 3.12.4` was silently ignored dead config
      # (that input belongs to actions/setup-python). Nothing in this job runs
      # Python directly, so the input is simply dropped. v2 runs on a deprecated
      # Node runtime; v4 is the current release with the same behavior.
      - name: 체크아웃
        uses: actions/checkout@v4

      # Guard on dev/main so a manual workflow_dispatch from another branch
      # cannot deploy. v1 of this action is deprecated (Node 12); v4 keeps the
      # same inputs used here.
      - name: Configure AWS credentials
        if: ${{ github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/main' }}
        uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_KEY }}
          aws-region: ${{ secrets.AWS_REGION }}

      # Assemble a flat deployment directory: decoded env file plus the app
      # sources, compose file, Dockerfile and requirements.
      - name: 배포 Dir 생성
        if: ${{ github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/main' }}
        working-directory: ./
        run: |
          mkdir -p deploy-ai
          echo "${{ secrets.ENV_PY }}" | base64 --decode >> ./deploy-ai/env
          cp ./app/* ./deploy-ai
          cp ./services/* ./deploy-ai
          cp ./ai_server.py ./deploy-ai
          cp ./docker-compose.yml ./deploy-ai
          cp ./Dockerfile ./deploy-ai
          cp ./requirements.txt ./deploy-ai

      # Zip the bundle, push it to S3, then trigger CodeDeploy against it.
      # NOTE(review): `zip -r deploy-ai *` produces deploy-ai.zip (zip appends
      # the extension), which matches the S3 key used below.
      - name: S3 업로드 및 CD 요청
        if: ${{ github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/main' }}
        working-directory: ./
        run: |
          cp utils/*.sh deploy-ai
          cp utils/appspec.yml deploy-ai
          cd deploy-ai && zip -r deploy-ai *
          aws s3 cp deploy-ai.zip s3://${{ secrets.AWS_BUCKET_NAME }}/deploy-ai.zip
          aws deploy create-deployment \
            --application-name ${{ secrets.AWS_CODEDEPLOY_APP_NAME }} \
            --deployment-config-name CodeDeployDefault.AllAtOnce \
            --deployment-group-name ${{ secrets.AWS_CODEDEPLOY_GROUP_NAME_AI }} \
            --file-exists-behavior OVERWRITE \
            --s3-location bucket=${{ secrets.AWS_BUCKET_NAME }},bundleType=zip,key=deploy-ai.zip
8 changes: 8 additions & 0 deletions .idea/.gitignore

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

9 changes: 9 additions & 0 deletions .idea/dream-ai.iml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 6 additions & 0 deletions .idea/misc.xml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

8 changes: 8 additions & 0 deletions .idea/modules.xml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

6 changes: 6 additions & 0 deletions .idea/vcs.xml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

11 changes: 11 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# Base image pinned to the same interpreter version the workflow targets.
FROM python:3.12.4-slim

# FIX: work in a dedicated directory instead of copying the app into `/`.
WORKDIR /app

# FIX: install dependencies BEFORE copying the full source tree so the pip
# layer is cached across builds that only change application code. Both
# installs are combined into one layer and both use --no-cache-dir to keep
# the image small (the original cached python-dotenv's wheels).
COPY requirements.txt .
RUN pip install --no-cache-dir python-dotenv \
 && pip install --no-cache-dir -r requirements.txt

# Copy the application sources last.
COPY . .

ENTRYPOINT ["python3", "ai_server.py"]
2 changes: 1 addition & 1 deletion app/es_vecDB_search.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from sentence_transformers import SentenceTransformer

# Elasticsearch 연결 설정
es = Elasticsearch([{'host': 'localhost', 'port': 9200, 'scheme': 'http'}])
es = Elasticsearch([{'host': 'elasticsearch', 'port': 9200, 'scheme': 'http'}])

# 사전 학습된 Sentence-BERT 모델 로드
model = SentenceTransformer('paraphrase-multilingual-MiniLM-L12-v2')
Expand Down
34 changes: 34 additions & 0 deletions docker-compose.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
version: '3.8'

services:
  ai:
    build: .
    container_name: ai
    restart: on-failure
    env_file:
      - .env
    # FIX: the app connects to host `elasticsearch` (see es_vecDB_search.py /
    # es_vecDB_save.py), so declare the startup dependency — otherwise compose
    # may create `ai` before the elasticsearch container exists. Note this only
    # orders container start; the app's `restart: on-failure` covers the window
    # before elasticsearch is actually ready to accept connections.
    depends_on:
      - elasticsearch
    networks:
      - dream
      - datasource
    ports:
      - "8000:8000"

  elasticsearch:
    image: docker.elastic.co/elasticsearch/elasticsearch:8.5.0
    container_name: elasticsearch
    environment:
      - discovery.type=single-node
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
    networks:
      - datasource
    # Only reachable from other containers on the shared network; not
    # published to the host.
    expose:
      - "9200"
      - "9300"
    volumes:
      - esdata:/usr/share/elasticsearch/data

networks:
  dream:
    external: true
  datasource:
    external: true
2 changes: 1 addition & 1 deletion services/es_vecDB_save.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from sentence_transformers import SentenceTransformer

# Elasticsearch 연결 설정
es = Elasticsearch([{'host': 'localhost', 'port': 9200, 'scheme': 'http'}])
es = Elasticsearch([{'host': 'elasticsearch', 'port': 9200, 'scheme': 'http'}])

# 사전 학습된 Sentence-BERT 모델 로드
model = SentenceTransformer('paraphrase-multilingual-MiniLM-L12-v2')
Expand Down
18 changes: 18 additions & 0 deletions utils/appspec.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
version: 0.0
os: linux
files:
  - source: /
    destination: /home/ubuntu/deploy-ai
    # FIX: canonical YAML 1.2 boolean instead of the 1.1 truthy word `yes`
    # (same parsed value, unambiguous across parsers).
    overwrite: true

permissions:
  - object: /
    pattern: "**"
    owner: ubuntu
    group: ubuntu

hooks:
  ApplicationStart:
    - location: deploy.sh
      timeout: 60
      runas: ubuntu
52 changes: 52 additions & 0 deletions utils/deploy.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
#!/bin/bash
# CodeDeploy ApplicationStart hook: ensure docker networks exist, replace the
# running `ai` stack, and prune leftover images.

runNetwork(){
    # Create the external networks the compose file expects, if missing.
    # FIX: the original echo messages said "harpsharp" while creating the
    # `dream` / `datasource` networks — messages now match the action.
    if [ -z "$(docker network ls | grep -w dream)" ]
    then
        echo "dream 네트워크를 생성합니다."
        docker network create dream
    else
        echo "dream 네트워크가 이미 존재합니다."
    fi

    if [ -z "$(docker network ls | grep -w datasource)" ]
    then
        echo "datasource 네트워크를 생성합니다."
        docker network create datasource
    else
        echo "datasource 네트워크가 이미 존재합니다."
    fi
}

# Stop and remove the container named $1 and its image, if present.
# FIX: the original read the undefined $CONTAINER_NAME instead of its
# argument, and despite its message never actually stopped/removed the
# container — only the image.
serviceDown(){
    local name="$1"

    if [ "$(docker ps -a -q -f name="$name")" ]
    then
        echo "컨테이너 $name 종료 및 삭제 중..."
        docker rm -f "$(docker ps -a -q -f name="$name")"

        IMAGE_ID=$(docker images -q "$name")

        if [ "$IMAGE_ID" ]; then
            echo "이미지 $name 삭제 중..."

            docker rmi -f $IMAGE_ID
        fi
    fi
}

cleanUpImages(){
    # Remove dangling images, then prune everything unused.
    docker rmi $(docker images -f "dangling=true" -q)
    sudo docker system prune -af
}


runNetwork

cd /home/ubuntu/deploy-ai
mv env .env
# FIX: was `serviecDown ai` — a typo, so the old container was never torn
# down ("command not found") before bringing the new one up.
serviceDown ai
docker-compose up -d --build

cleanUpImages


0 comments on commit de836c9

Please sign in to comment.