diff --git a/.github/workflows/github-actions.yml b/.github/workflows/github-actions.yml
new file mode 100644
index 0000000..74207b1
--- /dev/null
+++ b/.github/workflows/github-actions.yml
@@ -0,0 +1,57 @@
+name: CI/CD
+on:
+  push:
+    branches:
+      - dev
+      - main
+
+  workflow_dispatch:
+
+jobs:
+  AI-CI:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+
+      - name: Configure AWS credentials
+        if: ${{ github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/main' }}
+        uses: aws-actions/configure-aws-credentials@v1
+        with:
+          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY }}
+          aws-secret-access-key: ${{ secrets.AWS_SECRET_KEY }}
+          aws-region: ${{ secrets.AWS_REGION }}
+
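+      # Gather everything CodeDeploy needs into deploy-ai/: the application code,
+      # the environment file decoded from the base64-encoded ENV_PY secret, and the Docker assets.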
+      - name: Create deployment directory
+        if: ${{ github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/main' }}
+        working-directory: ./
+        run: |
+          mkdir -p deploy-ai
+          echo "${{ secrets.ENV_PY }}" | base64 --decode >> ./deploy-ai/env
+          cp ./app/* ./deploy-ai
+          cp ./services/* ./deploy-ai
+          cp ./ai_server.py ./deploy-ai
+          cp ./docker-compose.yml ./deploy-ai
+          cp ./Dockerfile ./deploy-ai
+          cp ./requirements.txt ./deploy-ai
+
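+      # The CodeDeploy hook script and appspec.yml from utils/ ride along in the bundle
+      # so the target instance knows how to restart the stack (see utils/deploy.sh).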
+      - name: Upload to S3 and request CodeDeploy deployment
+        if: ${{ github.ref == 'refs/heads/dev' || github.ref == 'refs/heads/main' }}
+        working-directory: ./
+        run: |
+          cp utils/*.sh deploy-ai
+          cp utils/appspec.yml deploy-ai
+          cd deploy-ai && zip -r deploy-ai *
+          aws s3 cp deploy-ai.zip s3://${{ secrets.AWS_BUCKET_NAME }}/deploy-ai.zip
+
+          aws deploy create-deployment \
+            --application-name ${{ secrets.AWS_CODEDEPLOY_APP_NAME }} \
+            --deployment-config-name CodeDeployDefault.AllAtOnce \
+            --deployment-group-name ${{ secrets.AWS_CODEDEPLOY_GROUP_NAME_AI }} \
+            --file-exists-behavior OVERWRITE \
+            --s3-location bucket=${{ secrets.AWS_BUCKET_NAME }},bundleType=zip,key=deploy-ai.zip
diff --git a/.idea/.gitignore b/.idea/.gitignore
new file mode 100644
index 0000000..13566b8
--- /dev/null
+++ b/.idea/.gitignore
@@ -0,0 +1,8 @@
+# Default ignored files
+/shelf/
+/workspace.xml
+# Editor-based HTTP Client requests
+/httpRequests/
+# Datasource local storage ignored files
+/dataSources/
+/dataSources.local.xml
diff --git a/.idea/dream-ai.iml b/.idea/dream-ai.iml
new file mode 100644
index 0000000..d6ebd48
--- /dev/null
+++ b/.idea/dream-ai.iml
@@ -0,0 +1,9 @@
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/misc.xml b/.idea/misc.xml
new file mode 100644
index 0000000..47478b9
--- /dev/null
+++ b/.idea/misc.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/modules.xml b/.idea/modules.xml
new file mode 100644
index 0000000..0b6631a
--- /dev/null
+++ b/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/vcs.xml b/.idea/vcs.xml
new file mode 100644
index 0000000..35eb1dd
--- /dev/null
+++ b/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..953514a
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,11 @@
+# Base image
+FROM python:3.12.4-slim
+
+# Copy the project files into the image
+COPY . .
+
+# Install required packages
+RUN pip install python-dotenv
+RUN pip install --no-cache-dir -r requirements.txt
+
+ENTRYPOINT ["python3", "ai_server.py"]
diff --git a/app/es_vecDB_search.py b/app/es_vecDB_search.py
index 6590856..560e1b3 100644
--- a/app/es_vecDB_search.py
+++ b/app/es_vecDB_search.py
@@ -2,7 +2,7 @@
from sentence_transformers import SentenceTransformer
# Elasticsearch connection settings
-es = Elasticsearch([{'host': 'localhost', 'port': 9200, 'scheme': 'http'}])
+es = Elasticsearch([{'host': 'elasticsearch', 'port': 9200, 'scheme': 'http'}])
# Load the pre-trained Sentence-BERT model
model = SentenceTransformer('paraphrase-multilingual-MiniLM-L12-v2')
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 0000000..8e60081
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,38 @@
+version: '3.8'
+
+services:
+  ai:
+    build: .
+    container_name: ai
+    restart: on-failure
+    env_file:
+      - .env
+    networks:
+      - dream
+      - datasource
+    ports:
+      - "8000:8000"
+
+  elasticsearch:
+    image: docker.elastic.co/elasticsearch/elasticsearch:8.5.0
+    container_name: elasticsearch
+    environment:
+      - discovery.type=single-node
+      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
+    networks:
+      - datasource
+    expose:
+      - "9200"
+      - "9300"
+    volumes:
+      - esdata:/usr/share/elasticsearch/data
+
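+# The dream and datasource networks are created outside compose (see utils/deploy.sh); compose only attaches to them.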
+networks:
+  dream:
+    external: true
+  datasource:
+    external: true
+
+volumes:
+  esdata:
\ No newline at end of file
diff --git a/services/es_vecDB_save.py b/services/es_vecDB_save.py
index 6a9597d..a0e3348 100644
--- a/services/es_vecDB_save.py
+++ b/services/es_vecDB_save.py
@@ -2,7 +2,7 @@
from sentence_transformers import SentenceTransformer
# Elasticsearch connection settings
-es = Elasticsearch([{'host': 'localhost', 'port': 9200, 'scheme': 'http'}])
+es = Elasticsearch([{'host': 'elasticsearch', 'port': 9200, 'scheme': 'http'}])
# Load the pre-trained Sentence-BERT model
model = SentenceTransformer('paraphrase-multilingual-MiniLM-L12-v2')
diff --git a/utils/appspec.yml b/utils/appspec.yml
new file mode 100644
index 0000000..97dd2c1
--- /dev/null
+++ b/utils/appspec.yml
@@ -0,0 +1,19 @@
+version: 0.0
+os: linux
+files:
+  - source: /
+    destination: /home/ubuntu/deploy-ai
+    overwrite: yes
+
+permissions:
+  - object: /
+    pattern: "**"
+    owner: ubuntu
+    group: ubuntu
+
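+# ApplicationStart runs deploy.sh as ubuntu once the bundle has been copied to /home/ubuntu/deploy-ai.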
+hooks:
+  ApplicationStart:
+    - location: deploy.sh
+      timeout: 60
+      runas: ubuntu
\ No newline at end of file
diff --git a/utils/deploy.sh b/utils/deploy.sh
new file mode 100644
index 0000000..4ecb59d
--- /dev/null
+++ b/utils/deploy.sh
@@ -0,0 +1,57 @@
+#!/bin/bash
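+# CodeDeploy ApplicationStart hook: ensure the docker networks exist, refresh the
+# .env file and rebuild the ai container with docker-compose.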
+
+runNetwork(){
+    # Create the docker networks used by docker-compose if they do not exist yet
+    if [ -z "$(docker network ls | grep dream)" ]
+    then
+        echo "Creating the dream network."
+        docker network create dream
+    else
+        echo "The dream network already exists."
+    fi
+
+    if [ -z "$(docker network ls | grep datasource)" ]
+    then
+        echo "Creating the datasource network."
+        docker network create datasource
+    else
+        echo "The datasource network already exists."
+    fi
+}
+
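+# Stop and remove the previous ai container and image so the next build starts clean.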
+serviceDown(){
+    CONTAINER_NAME=$1
+
+    if [ "$(docker ps -a -q -f name=$CONTAINER_NAME)" ]
+    then
+        echo "Stopping and removing container $CONTAINER_NAME..."
+        docker stop $CONTAINER_NAME && docker rm $CONTAINER_NAME
+
+        IMAGE_ID=$(docker images -q $CONTAINER_NAME)
+
+        if [ "$IMAGE_ID" ]; then
+            echo "Removing image $CONTAINER_NAME..."
+            docker rmi -f $IMAGE_ID
+        fi
+    fi
+}
+
+cleanUpImages(){
+    docker rmi $(docker images -f "dangling=true" -q)
+    sudo docker system prune -af
+}
+
+
+runNetwork
+
+cd /home/ubuntu/deploy-ai
+mv env .env
+serviceDown ai
+docker-compose up -d --build
+
+cleanUpImages
+
+