diff --git a/.gitignore b/.gitignore index d3e55db..634bbc8 100644 --- a/.gitignore +++ b/.gitignore @@ -28,3 +28,4 @@ backup/ .coverage dataset-values.json +**/league-images/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a631fbd..66808a5 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -19,6 +19,7 @@ repos: exclude: ^(legacy-backend|tests)/.*$ files: ^crispy-api/api/.*\.py$ stages: [commit] + additional_dependencies: ["types-requests"] - repo: https://github.com/psf/black rev: 22.10.0 hooks: diff --git a/README.md b/README.md index 44c5d47..b9994b5 100644 --- a/README.md +++ b/README.md @@ -2,38 +2,55 @@ # Crispy -Crispy is a machine-learning algorithm to make video-games montages efficiently. -It uses a neural network to detect highlights in the video-game frames.\ -[![Tech](https://skillicons.dev/icons?i=python,svelte,ts,css,html,docker,bash,mongo,github)](https://skillicons.dev) +**Crispy** is a machine learning platform designed to efficiently create video game montages. It utilizes neural networks to detect highlights within video game footage. + +[![Tech Stack](https://skillicons.dev/icons?i=python,svelte,ts,css,html,docker,bash,mongo,github)](https://skillicons.dev) # Demo -[live demo](https://crispy.gyroskan.com/)\ -[youtube demo](https://www.youtube.com/watch?v=svT-Z_MkAfw) +- [Live Demo](https://crispy.gyroskan.com/) +- [YouTube Demo](https://www.youtube.com/watch?v=svT-Z_MkAfw) + +# Supported Games -# Supported games +Crispy currently supports the following games: -Currently it supports **[Valorant](https://playvalorant.com/)**, **[Overwatch](https://playoverwatch.com/)**, **[CSGO2](https://www.counter-strike.net/cs2)** and **[The Finals](https://www.reachthefinals.com/)**. +- **[Valorant](https://playvalorant.com/)** +- **[League of Legends](https://www.leagueoflegends.com/)** +- **[Overwatch 2](https://playoverwatch.com/)** +- **[CSGO 2](https://www.counter-strike.net/cs2)** +- **[The Finals](https://www.reachthefinals.com/)** # Usage -## Releases +## Installation -[Releases](https://github.com/Flowtter/crispy/releases) are available for windows and linux. +Download the latest release for your operating system from the [Releases page](https://github.com/Flowtter/crispy/releases). Releases are available for Windows and Linux. ## Setup -Firstly, you will have to install [ffmpeg](https://ffmpeg.org/about.html) (ffprobe is also required).\ -Once unzip, you can run the setup.[sh|bat] file.\ -Then you can add your videos in the mp4/mp3 format in the resources folder. +1. **Install Dependencies** + + - Install [FFmpeg](https://ffmpeg.org/about.html). Ensure that both `ffmpeg` and `ffprobe` are installed and added to your system's PATH. + +2. **Run the Setup** + + - Unzip the downloaded release. + - Run the appropriate setup script: + - For Windows: `setup.bat` + - For Linux: `setup.sh` + +3. **Add your Videos and Musics** + + - Place your video files (`.mp4` format) and audio files (`.mp3` format) into the `resources` folder. ## Configuration -You can configure the algorithm in the settings.json file.\ -It's where you'll change the game.\ -config example: +Customize the application by editing the `settings.json` file. This is where you can adjust algorithm settings and select the game you are processing. + +### Configuration Example ```json { @@ -51,22 +68,21 @@ config example: } ``` -The following settings are adjustable: +### Available Settings -- neural-network - - confidence: The confidence level used by the neural network. 
Lower values will include more frames, but might include some false positives. -- clip - - framerate: The framerate of the clip at which the neural network will be applied. Higher values will include more frames, but will take more time to process. - - second-before: Seconds of gameplay included before the highlight. - - second-after: Seconds of gameplay included after the highlight. - - second-between-kills: Transition time between highlights. If the time between two highlights is less than this value, the both highlights will be merged. -- game: Chosen game (either "valorant", "overwatch" or "csgo2") -- stretch: This is an option in case you're playing on a 4:3 resolution but your clips are recorded in 16:9. +- **neural-network** + - **confidence**: The confidence threshold used by the neural network. Lower values may include more frames but might introduce false positives. +- **clip** + - **framerate**: The framerate at which the neural network processes the clips. Higher values include more frames but increase processing time. + - **second-before**: Number of seconds to include before the highlight. + - **second-after**: Number of seconds to include after the highlight. + - **second-between-kills**: Maximum time between kills to be considered part of the same highlight. If the time between two highlights is less than this value, they will be merged. +- **stretch**: Set to `true` if you're playing on a 4:3 resolution but your clips are recorded in 16:9. +- **game**: The game you are processing. Options are `"valorant"`, `"overwatch"`, `"csgo2"`, `"the-finals"`, `"league-of-legends"`. -### Recommended settings +### Recommended Settings -I recommend you to use the trials and errors method to find the best settings for your videos.\ -Here are some settings that I found to work well for me: +It's recommended to experiment with the settings to achieve the best results for your videos. Below are some configurations that have worked well: #### Valorant @@ -86,7 +102,7 @@ Here are some settings that I found to work well for me: } ``` -#### Overwatch +#### Overwatch 2 ```json { @@ -104,7 +120,7 @@ Here are some settings that I found to work well for me: } ``` -#### CSGO2 +#### CSGO 2 ```json { @@ -124,10 +140,9 @@ Here are some settings that I found to work well for me: #### The Finals -Since the finals is not using the neural network, the settings are a bit different.\ -The principal problem is that the OCR makes the code very slow.\ -So I recommend using a framerate of 4 which gave me the ratio between speed and results.\ -Though, If you want to have better results, you can try to increase the framerate, I would recommend a maximum of 8. +Since **The Finals** does not use the neural network, the settings differ slightly. The application uses image recognition and Optical Character Recognition (OCR) to detect kills. Due to the computational demands of OCR, processing can be slow. + +**Recommendation:** Use a framerate of 4 to balance speed and accuracy. Increasing the framerate may improve results but will significantly increase processing time (a maximum of 8 is suggested). ```json { @@ -138,119 +153,182 @@ Though, If you want to have better results, you can try to increase the framerat "second-between-kills": 6 }, "stretch": false, - "game": "thefinals" + "game": "the-finals" +} +``` + +#### League of Legends + +For **League of Legends**, image recognition is used to detect kills. A lower framerate is sufficient because kill indicators remain on the screen for an extended period. 
+ +```json +{ + "clip": { + "framerate": 4, + "second-before": 8, + "second-after": 0, + "second-between-kills": 10 + }, + "stretch": false, + "game": "league-of-legends" } ``` -## Run +## Running the Application + +After configuration, run the application using the appropriate script: + +- For Windows: `run.bat` +- For Linux: `run.sh` + +# Frontend Overview -You can now run the application with the run.[sh|bat] file. +The frontend is a web application that allows you to interact with the Crispy algorithm and customize your video montages. It consists of five main sections: -# Frontend explanation +1. **Clips** +2. **Segments** +3. **Music** +4. **Effects** +5. **Result** -The frontend is a web-application that allows you to add options to the Crispy algorithm.\ -It has 5 views: +## Clips -- Clips -- Segments -- Musics -- Effects -- Result +In the **Clips** section, you can: -### Clips +- **View and manage your video clips**: See a list of your uploaded videos. +- **Rearrange clips**: Drag and drop to reorder your clips. +- **Select clips for segmentation**: Use the "Show" toggle to select which videos to process. +- **Add custom effects**: Apply effects to individual clips. +- **Generate segments**: After making your selections, click on **Generate Segments** to create highlights. -In the clips view, you can see the list of your videos.\ -You can rearrange them by dragging and dropping them.\ -Select the videos you want to make segments of by selecting "show" for that video \ -Select the videos you want in the montage and add customs effects for a single clip.\ -Once you've made your selection, you can click on `generate segments` to create the segments. +## Segments -### Segments +In the **Segments** section, you can: -In the segments view, you can see the list of your segments.\ -Each segment is a gameplay highlight chosen by the algorithm. \ -You can select "hide" on a segment to exclude that segment from the final result. +- **View generated segments**: See the list of highlights extracted by the algorithm. +- **Include or exclude segments**: Use the "Hide" toggle to exclude segments from the final montage. -### Musics +## Music -In the music view, you can see the list of your music.\ -This is the music that will be played in the final result video. \ -You can select "hide" for songs you don't want and you can you can rearrange them by dragging and dropping them. +In the **Music** section, you can: -### Effects +- **Manage your music tracks**: View the list of music files added to the `resources` folder. +- **Select music for the montage**: Use the "Hide" toggle to exclude tracks. +- **Rearrange music**: Drag and drop to set the order of music tracks in your montage. -In the effects view, you can see the list of your effects.\ -Those effects are applied to the whole video.\ -Yet the clips' effects override the global effects.\ -The following effects are available to use: +## Effects -- blur -- hflip -- vflip -- brightness -- saturation -- zoom -- grayscale +In the **Effects** section, you can: -### Result +- **Apply global effects**: Add effects that will be applied to the entire video. +- **Override with clip effects**: Note that effects applied to individual clips will override these global effects. +- **Available effects**: + - Blur + - Horizontal Flip (`hflip`) + - Vertical Flip (`vflip`) + - Brightness + - Saturation + - Zoom + - Grayscale -In the result view, you can see the result of your montage. 
+ +## Result + +In the **Result** section, you can: + +- **Preview your montage**: See the final assembled video with all clips, music, and effects applied. # Q&A -### **Q:** Why are some games not using the neural network? +#### Q: I get an Axios error when I load the web page. + +**A:** This error likely occurs because the backend is not running. Please ensure that the backend is operational. Look for the following messages in your console: + +```bash +INFO: Started server process [XXXX] +INFO: Waiting for application startup. +INFO: Adding X highlights, this may take a while. +WARNING: Wait for `Application startup complete.` to use Crispy. +INFO: Application startup complete. +INFO: Uvicorn running on http://127.0.0.1:7821 (Press CTRL+C to quit) +``` + +If you don't see the `Application startup complete` message, the backend is still initializing or there is an error. Wait until it completes or check for errors. If issues persist, refer to [existing issues](https://github.com/Flowtter/crispy/issues?q=is%3Aissue+error), or open a new issue if necessary. -**A:** To detect highlights in a video-game, the neural-network searches for things that always happen in a highlight.\ -For example, in Overwatch, a kill is symbolized by a red skull. So the neural-network will search for red skulls in the frames.\ -Unfortunately, not all games have such things.\ -The finals, for example, is a game where you don't have any symbol to represent a kill.\ -So for those games, the neural-network is not used. Instead, we're using an OCR to detect the killfeed.\ -The OCR is definitely not as efficient as the neural-network, slow, and depends on the quality of the video.\ -But it's the best we can do for now. +#### Q: How can I change the game? -### **Q:** Why are some games not supported? +**A:** To change the game setting in Crispy, follow these steps: -**A:** The neural-network has simply not been trained for those games.\ -If you want to add support for a game, you can train the neural-network yourself and then make a pull request.\ -A tutorial is available [here](https://github.com/Flowtter/crispy/tree/master/crispy-api/dataset). +1. **Stop the application.** +2. **Delete the `.data` folder and the `session` folder** in the Crispy directory. +3. **Edit `settings.json`** to specify the new game under the `"game"` key. +4. **Add or remove any necessary files in the `resources` folder** (e.g., new game dataset). +5. **Restart the application.** -### **Q:** In CSGO2, I moved the UI, and the kills are not detected anymore. What can I do? +These steps will reset the game configuration, and the new game will be applied upon starting Crispy. -**A:** Unfortunately, there is nothing you can do.\ -The neural-network is trained to detect kills in the default UI.\ -I'm planning to add support for custom UI in the future, but this is definitely not a priority. +#### Q: Why are some games not using the neural network? -### **Q:** Why is the algorithm so slow on the finals? +**A:** The neural network is designed to detect consistent visual cues that signify highlights, such as specific icons or symbols that appear during kills. Some games do not have these consistent indicators. For those games, we use alternative methods like image recognition or Optical Character Recognition (OCR). While these methods can be slower and less accurate, they are the best available options for such games.
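+To make the image-recognition path more concrete, here is a minimal, self-contained sketch of the kind of template matching it relies on: a champion portrait is slid over a cropped kill-feed region with OpenCV, and a normalised correlation score above a threshold is treated as a kill. The `find_champion` helper, its parameters, and the folder layout are illustrative assumptions; the actual implementation lives in `api/tools/video.py` and additionally checks the colour of the kill indicator before running the matching step.
+
+```python
+import os
+from typing import Optional
+
+import cv2
+
+
+def find_champion(region_path: str, portraits_dir: str, threshold: float = 0.75) -> Optional[str]:
+    """Return the best-matching champion name for a cropped kill-feed region, or None."""
+    region = cv2.imread(region_path, cv2.IMREAD_COLOR)
+    if region is None:
+        raise FileNotFoundError(region_path)
+
+    best_name, best_score = None, -1.0
+    for file_name in sorted(os.listdir(portraits_dir)):
+        template = cv2.imread(os.path.join(portraits_dir, file_name), cv2.IMREAD_COLOR)
+        if template is None:
+            continue
+        # matchTemplate requires the template to fit inside the searched region.
+        if template.shape[0] > region.shape[0] or template.shape[1] > region.shape[1]:
+            continue
+        # Slide the portrait over the region; the maximum of the similarity map
+        # is the best match score for this champion.
+        result = cv2.matchTemplate(region, template, cv2.TM_CCOEFF_NORMED)
+        score = float(result.max())
+        if score > best_score:
+            best_name, best_score = os.path.splitext(file_name)[0], score
+
+    # A score above the threshold is treated as a detected kill on that champion.
+    return best_name if best_score >= threshold else None
+```
+
+The 0.75 threshold and `TM_CCOEFF_NORMED` mirror the values used for League of Legends in this project; OCR-based games such as The Finals follow the same overall idea but read the kill-feed text instead of matching portraits.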
-**A:** The algorithm is slow because we're using an OCR to detect the killfeed.\ -This makes the algorithm very slow, which is why I recommend using a lower framerate for the finals.\ -**In most scenarios**, a framerate of 4 is enough to detect all the kills, and increasing the framerate will only increase the processing time, without improving the results. +#### Q: Why are some games not supported? + +**A:** The neural network requires training specific to each game. If a game is not supported, it means the neural network has not been trained for it yet. You can contribute by training the neural network for the game and submitting a pull request. A tutorial is available [here](https://github.com/Flowtter/crispy/tree/master/crispy-api/dataset). + +#### Q: In CSGO 2, I moved the UI, and the kills are not detected anymore. What can I do? + +**A:** Currently, the neural network is trained to detect kills based on the default UI layout. Custom UI configurations are not supported at this time. Support for custom UIs may be added in future updates. + +#### Q: Why is the algorithm so slow on The Finals? + +**A:** The algorithm is slow because it relies on OCR to detect the kill feed, which is computationally intensive. To improve processing time, use a lower framerate (e.g., 4 frames per second). Increasing the framerate will significantly increase processing time without substantial improvement in results. # Contributing -Every contribution is welcome. +We welcome contributions from the community! -## Setup pre-commit +## Setting Up Pre-Commit Hooks -First install `pre-commit` by running: +To maintain code quality, we use `pre-commit` hooks. Follow these steps to set them up: -```sh -pip install pre-commit -``` +1. **Install Pre-Commit** -Then to install the git hook run: + ```sh + pip install pre-commit + ``` -```sh -pre-commit install -t pre-commit -t commit-msg -``` +2. **Install Git Hooks** + + ```sh + pre-commit install -t pre-commit -t commit-msg + ``` + + This will set up `pre-commit` to run automatically on every `git commit`. + +## Development Setup -Now `pre-commit` will run on every `git commit`. +To get started with development: -## Start +1. **Frontend** -- `cd crispy-frontend && yarn && yarn dev` -- `cd crispy-backend && pip install -Ir requirements-dev.txt && python -m api` + ```sh + cd crispy-frontend + yarn + yarn dev + ``` -## Test +2. 
**Backend** -- `cd crispy-api && pytest` + ```sh + cd crispy-backend + pip install -Ir requirements-dev.txt + python -m api + ``` + +## Running Tests + +To run the test suite: + +```sh +cd crispy-api +pytest +``` diff --git a/crispy-api/.gitignore b/crispy-api/.gitignore index a882d1e..639dd4d 100644 --- a/crispy-api/.gitignore +++ b/crispy-api/.gitignore @@ -7,3 +7,5 @@ dataset/* .dataset/ merged* + +.venv diff --git a/crispy-api/api/__init__.py b/crispy-api/api/__init__.py index 968a8da..93b0165 100644 --- a/crispy-api/api/__init__.py +++ b/crispy-api/api/__init__.py @@ -23,8 +23,10 @@ VIDEOS, ) from api.tools.AI.network import NeuralNetwork +from api.tools.enums import SupportedGames from api.tools.filters import apply_filters # noqa from api.tools.setup import handle_highlights, handle_musics +from api.tools.utils import download_champion_images ENCODERS_BY_TYPE[ObjectId] = str @@ -40,7 +42,9 @@ def init_app(debug: bool) -> FastAPI: if debug: - logging.getLogger("uvicorn").setLevel(logging.DEBUG) + logger = logging.getLogger("uvicorn") + logger.setLevel(logging.DEBUG) + logger.debug("Crispy started in debug mode") return FastAPI(debug=True) return FastAPI(docs_url=None, redoc_url=None, openapi_url=None) @@ -71,6 +75,8 @@ def is_tool_installed(ffmpeg_tool: str) -> None: async def setup_crispy() -> None: await handle_musics(MUSICS) await handle_highlights(VIDEOS, GAME, framerate=FRAMERATE) + if GAME == SupportedGames.LEAGUE_OF_LEGENDS: + await download_champion_images() @app.exception_handler(HTTPException) @@ -87,4 +93,9 @@ def http_exception(request: Request, exc: HTTPException) -> JSONResponse: ) +@app.get("/health") +def health() -> dict: + return {"status": "ok"} + + from api.routes import filters, highlight, music, result, segment # noqa diff --git a/crispy-api/api/config.py b/crispy-api/api/config.py index b06e4e1..beca609 100644 --- a/crispy-api/api/config.py +++ b/crispy-api/api/config.py @@ -1,11 +1,14 @@ import json import os +import warnings import easyocr from starlette.config import Config from api.tools.enums import SupportedGames +warnings.filterwarnings("ignore", category=FutureWarning) + config = Config(".env") DEBUG = config("DEBUG", cast=bool, default=False) @@ -25,6 +28,8 @@ VIDEOS = os.path.join(RESOURCES, "videos") MUSICS = os.path.join(RESOURCES, "musics") +LEAGUE_IMAGES_PATH = os.path.join(os.getcwd(), "league-images") + DATASET_PATH = "dataset" DATASET_VALUES_PATH = os.path.join(DATASET_PATH, "dataset-values.json") DATASET_CSV_PATH = os.path.join(DATASET_PATH, "result.csv") @@ -43,7 +48,7 @@ __clip = __settings.get("clip") if __clip is None: - raise KeyError(f"clips not found in {SETTINGS_JSON}") + raise KeyError(f"No clips in the {SETTINGS_JSON}") FRAMERATE = __clip.get("framerate", 8) OFFSET = __clip.get("second-between-kills", 0) * FRAMERATE @@ -53,10 +58,13 @@ GAME = __settings.get("game") if GAME is None: raise KeyError("game not found in settings.json") - if GAME.upper() not in [game.name for game in SupportedGames]: + if GAME.upper().replace("-", "_") not in [game.name for game in SupportedGames]: raise ValueError(f"game {GAME} not supported") - USE_NETWORK = GAME not in [SupportedGames.THEFINALS] + USE_NETWORK = GAME not in [ + SupportedGames.THE_FINALS, + SupportedGames.LEAGUE_OF_LEGENDS, + ] __neural_network = __settings.get("neural-network") if __neural_network is None and USE_NETWORK: diff --git a/crispy-api/api/models/highlight.py b/crispy-api/api/models/highlight.py index bc94b51..a4dce0d 100644 --- a/crispy-api/api/models/highlight.py +++ 
b/crispy-api/api/models/highlight.py @@ -298,6 +298,18 @@ def post_process(image: Image) -> Image: and killfeed_state ) + async def extract_league_of_legends_images( + self, framerate: int = 4, stretch: bool = False + ) -> bool: + def post_process(image: Image) -> Image: + return image + + return await self.extract_images( + post_process, + Box(1795, 240, 133, 245, 0, stretch, from_center=False), + framerate=framerate, + ) + async def extract_images_from_game( self, game: SupportedGames, framerate: int = 4, stretch: bool = False ) -> bool: @@ -307,8 +319,10 @@ async def extract_images_from_game( return await self.extract_valorant_images(framerate, stretch) elif game == SupportedGames.CSGO2: return await self.extract_csgo2_images(framerate, stretch) - elif game == SupportedGames.THEFINALS: + elif game == SupportedGames.THE_FINALS: return await self.extract_the_finals_images(framerate, stretch) + elif game == SupportedGames.LEAGUE_OF_LEGENDS: + return await self.extract_league_of_legends_images(framerate, stretch) else: raise NotImplementedError(f"game {game} not supported") @@ -458,9 +472,7 @@ async def scale_video( if not os.path.exists(self.path): raise FileNotFoundError(f"{self.path} not found") - logger.warning( - f"WARNING:Scaling video {self.path}, saving a backup in ./backup" - ) + logger.warning(f"Scaling video {self.path}, saving a backup in ./backup") if not os.path.exists(backup): os.makedirs(backup) diff --git a/crispy-api/api/tools/enums.py b/crispy-api/api/tools/enums.py index b72edde..b39261c 100644 --- a/crispy-api/api/tools/enums.py +++ b/crispy-api/api/tools/enums.py @@ -5,4 +5,5 @@ class SupportedGames(str, Enum): VALORANT = "valorant" OVERWATCH = "overwatch" CSGO2 = "csgo2" - THEFINALS = "thefinals" + THE_FINALS = "the-finals" + LEAGUE_OF_LEGENDS = "league-of-legends" diff --git a/crispy-api/api/tools/setup.py b/crispy-api/api/tools/setup.py index 24e70ff..a46f3c0 100644 --- a/crispy-api/api/tools/setup.py +++ b/crispy-api/api/tools/setup.py @@ -78,7 +78,7 @@ def handle_specific_game( game: SupportedGames, framerate: int = 4, ) -> None: - if game == SupportedGames.THEFINALS: + if game == SupportedGames.THE_FINALS: handle_the_finals(new_highlights, framerate) diff --git a/crispy-api/api/tools/utils.py b/crispy-api/api/tools/utils.py index 8fe2f9b..2dd0613 100644 --- a/crispy-api/api/tools/utils.py +++ b/crispy-api/api/tools/utils.py @@ -1,8 +1,82 @@ -from typing import Any, Dict, List +import asyncio +import logging +import os +from io import BytesIO +from typing import Any, Dict, List, Union +import aiohttp +import requests +from PIL import Image +from api.config import LEAGUE_IMAGES_PATH from api.models.highlight import Highlight from api.tools.job_scheduler import JobScheduler +logger = logging.getLogger("uvicorn") + +# URL for champion data and image base +base_url = "https://ddragon.leagueoflegends.com" +version_url = f"{base_url}/realms/na.json" +json_url = f"{base_url}/cdn/VERSION/data/en_US/champion.json" +image_base_url = f"{base_url}/cdn/VERSION/img/champion" + + +def get_league_of_legends_version() -> Union[str, None]: + response = requests.get(version_url) + if response.status_code != 200: # pragma: no cover + logger.error("Cannot download the league of legends patch version") + return None + data = response.json() + if "n" not in data or "champion" not in data["n"]: # pragma: no cover + logger.error("Could not extract the league of legends patch version") + return None + return str(data["n"]["champion"]) + + +async def fetch_image(session: aiohttp.ClientSession, url:
str, path: str) -> None: + async with session.get(url) as response: + if response.status != 200: # pragma: no cover + logger.error(f"Cannot download image from {url}") + return + img = Image.open(BytesIO(await response.read())) + img.save(path) + + +async def download_champion_images(path: str = LEAGUE_IMAGES_PATH) -> None: + version = get_league_of_legends_version() + if not version: # pragma: no cover + logger.error("Could not download the league of legends images") + return + async with aiohttp.ClientSession() as session: + champions = await session.get(json_url.replace("VERSION", version)) + if champions.status != 200: # pragma: no cover + logger.error("Cannot download the league of legends champion.json") + return + champion_names = (await champions.json())["data"].keys() + + if not os.path.exists(path): # pragma: no cover + os.makedirs(path) + + logger.info("Downloading league of legends champion images") + tasks = [] + for champion in champion_names: + image_path = os.path.join(path, f"{champion}.png") + if os.path.exists(image_path): + continue + image_url = f"{image_base_url.replace('VERSION', version)}/{champion}.png" + tasks.append(fetch_image(session, image_url, image_path)) + + await asyncio.gather(*tasks) + + for champion in champion_names: + image_path = os.path.join(path, f"{champion}.png") + if os.path.exists(image_path): + img = Image.open(image_path) + img = img.resize((41, 41), Image.ANTIALIAS) + img.save(image_path) + + logger.info("Done downloading and resizing league champions") + def get_all_jobs_from_highlights( job_scheduler: JobScheduler, highlights: List[Highlight] diff --git a/crispy-api/api/tools/video.py b/crispy-api/api/tools/video.py index b96b5ca..d53f4d4 100644 --- a/crispy-api/api/tools/video.py +++ b/crispy-api/api/tools/video.py @@ -2,12 +2,13 @@ import logging import os from collections import Counter -from typing import List, Tuple +from typing import Any, Dict, List, Tuple, Union +import cv2 import numpy as np from PIL import Image -from api.config import GAME, READER +from api.config import GAME, LEAGUE_IMAGES_PATH, READER from api.models.highlight import Highlight from api.models.segment import Segment from api.tools.AI.network import NeuralNetwork @@ -16,6 +17,8 @@ logger = logging.getLogger("uvicorn") +LEAGUE_CHAMPIONS: List[Dict[str, Any]] = [] + def _image_to_list_format(path: str) -> List[int]: """ @@ -153,16 +156,81 @@ def _create_the_finals_query_array(highlight: Highlight) -> List[int]: return queries +def _create_league_of_legends_query_array(highlight: Highlight) -> List[int]: + global LEAGUE_CHAMPIONS + if not LEAGUE_CHAMPIONS: + for image_path in sorted(os.listdir(LEAGUE_IMAGES_PATH)): + LEAGUE_CHAMPIONS.append( + { + "image": cv2.imread(os.path.join(LEAGUE_IMAGES_PATH, image_path)), + "name": image_path.split(".")[0], + } + ) + + images = sorted(os.listdir(highlight.images_path)) + images.sort() + + queries = [] + yellow_rgb = np.array([54, 216, 213]) + yellow_threshold = 115 + + for j, image_name in enumerate(images): + full_image_path = os.path.join(highlight.images_path, image_name) + image = cv2.imread(full_image_path, cv2.IMREAD_COLOR) + + is_kill = False + kill_spots = [] + + regions = { + 1: image[7:51, 46:48], + 2: image[69:113, 46:48], + 3: image[132:176, 46:48], + 4: image[195:239, 46:48], + } + + enemy_region = { + 1: image[8:49, 84:125], + 2: image[70:111, 84:125], + 3: image[133:174, 84:125], + 4: image[196:237, 84:125], + } + + for region_index, region in regions.items(): + avg_color = np.mean(region.reshape(-1, 3), 
axis=0) + color_distance = np.linalg.norm(avg_color - yellow_rgb) + + if color_distance < yellow_threshold: + enemy_image = enemy_region[region_index] + + max_score = -1 + for champion in LEAGUE_CHAMPIONS: + score = cv2.matchTemplate( + enemy_image, champion["image"], cv2.TM_CCOEFF_NORMED + ) + if score > max_score: + max_score = score + + if max_score > 0.75: + is_kill = True + kill_spots.append(region_index) + + if is_kill: + queries.append(j) + return queries + + def _get_query_array( - neural_network: NeuralNetwork, + neural_network: Union[NeuralNetwork, None], highlight: Highlight, confidence: float, game: SupportedGames, ) -> List[int]: if neural_network: return _create_query_array(neural_network, highlight, confidence) - if game == SupportedGames.THEFINALS: + if game == SupportedGames.THE_FINALS: return _create_the_finals_query_array(highlight) + if game == SupportedGames.LEAGUE_OF_LEGENDS: + return _create_league_of_legends_query_array(highlight) raise ValueError( f"No neural network for game {game} and no custom query array" ) # pragma: no cover @@ -231,7 +299,7 @@ def _post_process_query_array( async def extract_segments( highlight: Highlight, - neural_network: NeuralNetwork, + neural_network: Union[NeuralNetwork, None], confidence: float, framerate: int, offset: int, diff --git a/crispy-api/requirements-dev.txt b/crispy-api/requirements-dev.txt index 51da8ad..8984bff 100644 --- a/crispy-api/requirements-dev.txt +++ b/crispy-api/requirements-dev.txt @@ -7,3 +7,4 @@ pytest-cov==4.0.0 flake8 httpx mutagen +types-requests==2.32.0.20241016 diff --git a/crispy-api/requirements.txt b/crispy-api/requirements.txt index 01093ea..70b159c 100644 --- a/crispy-api/requirements.txt +++ b/crispy-api/requirements.txt @@ -21,3 +21,5 @@ scipy==1.8.0 pydub==0.25.1 montydb==2.4.0 easyocr==1.7.1 +aiohttp==3.10.10 +requests==2.32.3 diff --git a/crispy-api/settings.json b/crispy-api/settings.json index cefc18a..6efc3c4 100644 --- a/crispy-api/settings.json +++ b/crispy-api/settings.json @@ -4,9 +4,9 @@ }, "clip": { "framerate": 8, - "second-before": 3, + "second-before": 4, "second-after": 0.5, - "second-between-kills": 1 + "second-between-kills": 3 }, "stretch": false, "game": "valorant" diff --git a/crispy-api/tests/assets b/crispy-api/tests/assets index 87e16dd..4dc1aeb 160000 --- a/crispy-api/tests/assets +++ b/crispy-api/tests/assets @@ -1 +1 @@ -Subproject commit 87e16dd0c1c321c16562d4b5afef0fe47e71621f +Subproject commit 4dc1aebf35b9aef5705ee053a500907ec81f2318 diff --git a/crispy-api/tests/constants.py b/crispy-api/tests/constants.py index 4615bde..1333687 100644 --- a/crispy-api/tests/constants.py +++ b/crispy-api/tests/constants.py @@ -14,7 +14,8 @@ MAIN_VIDEO_1440 = os.path.join(VIDEOS_PATH, "main-video-1440.mp4") MAIN_VIDEO_OVERWATCH = os.path.join(VIDEOS_PATH, "main-video-overwatch.mp4") MAIN_VIDEO_CSGO2 = os.path.join(VIDEOS_PATH, "main-video-csgo2.mp4") -MAIN_VIDEO_THEFINALS = os.path.join(VIDEOS_PATH, "main-video-thefinals.mp4") +MAIN_VIDEO_THE_FINALS = os.path.join(VIDEOS_PATH, "main-video-the-finals.mp4") +MAIN_VIDEO_LEAGUE = os.path.join(VIDEOS_PATH, "main-video-league.mp4") MAIN_SEGMENT = os.path.join(VIDEOS_PATH, "main-video-segment.mp4") DATASET_VALUES_PATH = os.path.join(ROOT_ASSETS, "dataset-values.json") diff --git a/crispy-api/tests/models/highlight.py b/crispy-api/tests/models/highlight.py index 6b993fc..6624df5 100644 --- a/crispy-api/tests/models/highlight.py +++ b/crispy-api/tests/models/highlight.py @@ -9,9 +9,10 @@ from api.tools.enums import SupportedGames from 
tests.constants import ( MAIN_VIDEO_CSGO2, + MAIN_VIDEO_LEAGUE, MAIN_VIDEO_NO_AUDIO, MAIN_VIDEO_OVERWATCH, - MAIN_VIDEO_THEFINALS, + MAIN_VIDEO_THE_FINALS, ) @@ -157,9 +158,10 @@ async def test_segment_video_segments_are_removed(highlight, tmp_path): (None, SupportedGames.VALORANT, 8), (MAIN_VIDEO_OVERWATCH, SupportedGames.OVERWATCH, 1.5), (MAIN_VIDEO_CSGO2, SupportedGames.CSGO2, 1.5), - (MAIN_VIDEO_THEFINALS, SupportedGames.THEFINALS, 0.75), + (MAIN_VIDEO_THE_FINALS, SupportedGames.THE_FINALS, 0.75), + (MAIN_VIDEO_LEAGUE, SupportedGames.LEAGUE_OF_LEGENDS, 1.5), ], - ids=["valorant", "overwatch", "csgo2", "thefinals"], + ids=["valorant", "overwatch", "csgo2", "the-finals", "league-of-legends"], ) async def test_extract_game_images(highlight, highlight_path, game, rate): if highlight_path is not None: diff --git a/crispy-api/tests/tools/setup.py b/crispy-api/tests/tools/setup.py index 808d5cc..12da89a 100644 --- a/crispy-api/tests/tools/setup.py +++ b/crispy-api/tests/tools/setup.py @@ -10,7 +10,7 @@ MAIN_VIDEO, MAIN_VIDEO_NO_AUDIO, MAIN_VIDEO_STRETCH, - MAIN_VIDEO_THEFINALS, + MAIN_VIDEO_THE_FINALS, ) @@ -91,10 +91,10 @@ async def test_handle_highlights_the_finals(tmp_path): tmp_resources = os.path.join(tmp_path, "resources") os.mkdir(tmp_resources) - shutil.copy(MAIN_VIDEO_THEFINALS, tmp_resources) + shutil.copy(MAIN_VIDEO_THE_FINALS, tmp_resources) assert await handle_highlights( - tmp_resources, SupportedGames.THEFINALS, session=tmp_session + tmp_resources, SupportedGames.THE_FINALS, session=tmp_session ) assert Highlight.count_documents() == 1 diff --git a/crispy-api/tests/tools/utils.py b/crispy-api/tests/tools/utils.py index c4cb86a..1d24833 100644 --- a/crispy-api/tests/tools/utils.py +++ b/crispy-api/tests/tools/utils.py @@ -1,4 +1,10 @@ -from api.tools.utils import levenstein_distance +import os +import shutil +import time + +from PIL import Image + +from api.tools.utils import download_champion_images, levenstein_distance async def test_levenshtein_distance(): @@ -17,3 +23,23 @@ async def test_levenshtein_distance(): assert levenstein_distance("t", "test") == 3 assert levenstein_distance("test", "") == 4 assert levenstein_distance("", "test") == 4 + + +async def test_download_champion_images(tmp_path): + start_time = time.time() + await download_champion_images(tmp_path) + first_download_time = time.time() - start_time + + assert os.path.exists(tmp_path) + assert len(os.listdir(tmp_path)) > 100 + image_path = os.path.join(tmp_path, os.listdir(tmp_path)[0]) + image = Image.open(image_path) + assert image.size == (41, 41) + + start_time = time.time() + await download_champion_images(tmp_path) + second_download_time = time.time() - start_time + + assert second_download_time < first_download_time + + shutil.rmtree(tmp_path) diff --git a/crispy-api/tests/tools/video.py b/crispy-api/tests/tools/video.py index c1a8e5a..129aac8 100644 --- a/crispy-api/tests/tools/video.py +++ b/crispy-api/tests/tools/video.py @@ -7,7 +7,7 @@ from api.models.segment import Segment from api.tools.enums import SupportedGames from api.tools.video import extract_segments -from tests.constants import MAIN_VIDEO, MAIN_VIDEO_THEFINALS +from tests.constants import MAIN_VIDEO, MAIN_VIDEO_LEAGUE, MAIN_VIDEO_THE_FINALS @pytest.mark.parametrize( @@ -232,11 +232,11 @@ async def test_extract_segment_recompile_global( async def test_extract_segments_the_finals(highlight): - highlight.path = MAIN_VIDEO_THEFINALS + highlight.path = MAIN_VIDEO_THE_FINALS highlight.usernames = ["heximius", "sxr_raynox", "srx", "raynox"] 
highlight = highlight.save() - await highlight.extract_images_from_game(SupportedGames.THEFINALS, 8) + await highlight.extract_images_from_game(SupportedGames.THE_FINALS, 8) timestamps, _ = await extract_segments( highlight, None, @@ -245,7 +245,7 @@ async def test_extract_segments_the_finals(highlight): offset=0, frames_before=0, frames_after=8, - game=SupportedGames.THEFINALS, + game=SupportedGames.THE_FINALS, ) assert timestamps == [ (5.5, 7.875), @@ -256,3 +256,23 @@ async def test_extract_segments_the_finals(highlight): ] shutil.rmtree(highlight.images_path) shutil.rmtree(os.path.join(os.path.dirname(highlight.images_path), "usernames")) + + +async def test_extract_segments_league_of_legends(highlight): + highlight.path = MAIN_VIDEO_LEAGUE + highlight.save() + + await highlight.extract_images_from_game(SupportedGames.LEAGUE_OF_LEGENDS, 4) + timestamps, _ = await extract_segments( + highlight, + None, + confidence=0, + framerate=4, + offset=0, + frames_before=0, + frames_after=8, + game=SupportedGames.LEAGUE_OF_LEGENDS, + ) + assert timestamps == [(11.5, 14.75)] + + shutil.rmtree(highlight.images_path) diff --git a/crispy-frontend/.gitignore b/crispy-frontend/.gitignore index da93220..7f1a9d2 100644 --- a/crispy-frontend/.gitignore +++ b/crispy-frontend/.gitignore @@ -2,3 +2,5 @@ /public/build/ .DS_Store +.yarn/ +.yarnrc.yml diff --git a/crispy-frontend/public/global.css b/crispy-frontend/public/global.css index b92c5af..9b1da80 100644 --- a/crispy-frontend/public/global.css +++ b/crispy-frontend/public/global.css @@ -3,6 +3,7 @@ body { position: relative; width: 100%; height: 100%; + background-color: #131d35; } body { diff --git a/crispy-frontend/src/App.svelte b/crispy-frontend/src/App.svelte index bc517f3..30e8d77 100644 --- a/crispy-frontend/src/App.svelte +++ b/crispy-frontend/src/App.svelte @@ -57,7 +57,7 @@ } } - @media (max-width: 700px) { + @media (max-width: 900px) { .content { border-radius: 0% !important; } @@ -76,6 +76,7 @@
-
- -
-
- -
-
- -
-
- {#key mode} - {#if mode === "clips"} - - {:else if mode === "segments"} - - {:else if mode === "result"} - - {:else if mode === "musics"} - - {:else if mode === "effects"} - - {/if} - {/key} +
+ +
+
+ +
+
+ + {#if healthy} +
+
+ {#key mode} + {#if mode === "clips"} + + {:else if mode === "segments"} + + {:else if mode === "result"} + + {:else if mode === "musics"} + + {:else if mode === "effects"} + + {/if} + {/key} +
+ {:else} + + {/if}
-
diff --git a/crispy-frontend/src/lib/components/Filters.svelte b/crispy-frontend/src/lib/components/Filters.svelte index f6825b3..9d85f56 100644 --- a/crispy-frontend/src/lib/components/Filters.svelte +++ b/crispy-frontend/src/lib/components/Filters.svelte @@ -3,8 +3,8 @@ background-color: transparent; } button { - margin-top: -60px; - margin-bottom: 20px; + margin-top: -55px; + margin-bottom: 10px; float: right; text-align: center; padding: 12px 20px; diff --git a/crispy-frontend/src/lib/components/Loader.svelte b/crispy-frontend/src/lib/components/Loader.svelte new file mode 100644 index 0000000..876e725 --- /dev/null +++ b/crispy-frontend/src/lib/components/Loader.svelte @@ -0,0 +1,36 @@ + + + + +{#if !isHealthy} +
+
+
+{/if} diff --git a/crispy-frontend/src/lib/components/Menubar.svelte b/crispy-frontend/src/lib/components/Menubar.svelte index f699745..a488dd7 100644 --- a/crispy-frontend/src/lib/components/Menubar.svelte +++ b/crispy-frontend/src/lib/components/Menubar.svelte @@ -56,7 +56,7 @@ .selected { background-color: var(--primary); } - @media (max-width: 700px) { + @media (max-width: 900px) { .main { flex-direction: column; border-radius: 0% !important; @@ -105,6 +105,7 @@ export let mode; export let generating; + export let healthy; const waitForJobs = async (url, id, msg) => { let count = -1; @@ -152,6 +153,7 @@ }; async function generateSegments() { + if (!healthy) { return } if (generating) { globalError("Already generating."); return; @@ -215,7 +217,7 @@ await waitForJobs(API_URL + "/results/generate/highlights/status", toastId, "Generating Results!"); toast.pop(0); globalSuccess("All results generated! Generating final video..."); - globalInfo("Generating Final video! This may take a while...", { + globalInfo("Generating the final video! This may take a while...", { initial: 0, dismissable: false, }); @@ -240,6 +242,7 @@ } function changeMenu(newMode) { + if (!healthy) { return } if (generating) { globalError("Wait for current job to finish."); return; @@ -251,7 +254,7 @@ } -
+