From a5d917f72fe195c06efa2bee36f4a396b0c7acf0 Mon Sep 17 00:00:00 2001 From: grey Date: Tue, 24 Sep 2024 19:01:48 -0700 Subject: [PATCH] Add Windows support + docs, closes #22, closes #25 (#23) * feat: Add Windows Support, closes #22 * ci: GitHub Actions Runner * Update scripts/models.ps1 Co-authored-by: sourcery-ai[bot] <58596630+sourcery-ai[bot]@users.noreply.github.com> * Update scripts/build/debug.sh Co-authored-by: sourcery-ai[bot] <58596630+sourcery-ai[bot]@users.noreply.github.com> * ci: Create `release.ps1` * ci: Exclude `cl` build on Ubuntu * ci: Remove hardcoded `cl` location * docs: Fix documentation, closes #25 * ci: Build release `llama.cpp` in `release.sh` * fix: Enable WebView 2 * Add `WebView2` NuGet Package * ci: Create run scripts for local testing * fix: Force package installation by script --------- Co-authored-by: sourcery-ai[bot] <58596630+sourcery-ai[bot]@users.noreply.github.com> --- .github/workflows/cmake.yml | 24 +++-- .gitmodules | 8 +- CMakeLists.txt | 155 +++++++++++++++++++++------------ README.md | 96 ++------------------ docs/features.md | 6 +- docs/getting_started.md | 29 ------ docs/index.md | 7 +- docs/installation.md | 38 +++++++- docs/practical_applications.md | 22 ----- docs/requirements.md | 9 +- docs/supported_platforms.md | 14 --- docs/thank_you.md | 2 +- docs/usage.md | 26 +++++- llama.cpp | 2 +- scripts/build/debug.ps1 | 25 ++++++ scripts/build/release.ps1 | 24 +++++ scripts/build/release.sh | 4 +- scripts/clean.ps1 | 12 +++ scripts/models.ps1 | 16 ++++ scripts/run/debug.ps1 | 1 + scripts/run/release.ps1 | 1 + 21 files changed, 287 insertions(+), 234 deletions(-) delete mode 100644 docs/getting_started.md delete mode 100644 docs/practical_applications.md delete mode 100644 docs/supported_platforms.md create mode 100644 scripts/build/debug.ps1 create mode 100644 scripts/build/release.ps1 create mode 100644 scripts/clean.ps1 create mode 100644 scripts/models.ps1 create mode 100644 scripts/run/debug.ps1 create mode 100644 
scripts/run/release.ps1 diff --git a/.github/workflows/cmake.yml b/.github/workflows/cmake.yml index 43c5fb3..4da45e6 100644 --- a/.github/workflows/cmake.yml +++ b/.github/workflows/cmake.yml @@ -13,27 +13,37 @@ jobs: strategy: fail-fast: false matrix: - os: [ubuntu-latest, macos-latest] + os: [ubuntu-latest, macos-latest, windows-latest] build_type: [Release] - c_compiler: [gcc, clang] + c_compiler: [clang, cl] include: - - os: ubuntu-latest - c_compiler: gcc - cpp_compiler: g++ - os: ubuntu-latest c_compiler: clang cpp_compiler: clang++ - os: macos-latest c_compiler: clang cpp_compiler: clang++ + - os: windows-latest + c_compiler: cl + cpp_compiler: cl exclude: + - os: ubuntu-latest + c_compiler: cl - os: macos-latest - c_compiler: gcc + c_compiler: cl + - os: windows-latest + c_compiler: clang steps: - uses: actions/checkout@v4 with: submodules: 'true' - - name: Build (Linux/macOS) + - name: Build on Windows + if: runner.os == 'Windows' + run: .\scripts\build\release.ps1 + shell: pwsh + + - name: Build on Linux/macOS + if: runner.os != 'Windows' run: ./scripts/build/release.sh diff --git a/.gitmodules b/.gitmodules index 15fb155..43d1695 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,6 +1,6 @@ -[submodule "JUCE"] - path = JUCE - url = git@github.com:juce-framework/JUCE.git [submodule "llama.cpp"] path = llama.cpp - url = git@github.com:ggerganov/llama.cpp.git + url = https://github.com/ggerganov/llama.cpp.git +[submodule "JUCE"] + path = JUCE + url = https://github.com/juce-framework/JUCE.git diff --git a/CMakeLists.txt b/CMakeLists.txt index ef06fbc..b41b69c 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -10,7 +10,8 @@ juce_add_plugin(musegpt PLUGIN_CODE Muse FORMATS AAX AU AUv3 VST3 Standalone PRODUCT_NAME "musegpt" - COPY_PLUGIN_AFTER_BUILD TRUE) + COPY_PLUGIN_AFTER_BUILD TRUE + NEEDS_WEBVIEW2 TRUE) juce_generate_juce_header(musegpt) target_sources(musegpt @@ -24,6 +25,7 @@ target_compile_definitions(musegpt JUCE_LOGGING=1 # Enable logging 
JUCE_STRICT_REFCOUNTEDPOINTER=1 JUCE_WEB_BROWSER=1 + JUCE_USE_WIN_WEBVIEW2_WITH_STATIC_LINKING=1 JUCE_USE_CURL=0 JUCE_VST3_CAN_REPLACE_VST2=0) @@ -68,58 +70,99 @@ PRIVATE $<$:-O3> ) -# Add llama-server as a binary resource -add_custom_command( - OUTPUT ${CMAKE_BINARY_DIR}/llama-server - COMMAND ${CMAKE_COMMAND} -E copy - ${CMAKE_SOURCE_DIR}/build/llama.cpp/bin/llama-server - ${CMAKE_BINARY_DIR}/llama-server - DEPENDS ${CMAKE_SOURCE_DIR}/build/llama.cpp/bin/llama-server -) -add_custom_target(copy_llama_server ALL DEPENDS ${CMAKE_BINARY_DIR}/llama-server) - -# Ensure the executable is copied into the bundle -set_source_files_properties(${CMAKE_BINARY_DIR}/llama-server PROPERTIES MACOSX_PACKAGE_LOCATION Resources) -target_sources(musegpt PRIVATE ${CMAKE_BINARY_DIR}/llama-server) - -# Copy llama-server to VST plugin format's output directory -add_custom_command(TARGET musegpt POST_BUILD - COMMAND ${CMAKE_COMMAND} -E copy - ${CMAKE_BINARY_DIR}/llama-server - $/VST3/musegpt.vst3/Contents/Resources/llama-server -) - -# Copy llama-server to Standalone plugin format's output directory -add_custom_command(TARGET musegpt POST_BUILD - COMMAND ${CMAKE_COMMAND} -E copy - ${CMAKE_BINARY_DIR}/llama-server - $/Standalone/musegpt.app/Contents/Resources/llama-server -) - -# Copy model weights to VST plugin format's output directory -add_custom_command(TARGET musegpt POST_BUILD - COMMAND ${CMAKE_COMMAND} -E copy - ${CMAKE_SOURCE_DIR}/models/gemma-2b-it.fp16.gguf - $/VST3/musegpt.vst3/Contents/Resources/gemma-2b-it.fp16.gguf -) - -# Copy model weights to Standalone plugin format's output directory -add_custom_command(TARGET musegpt POST_BUILD - COMMAND ${CMAKE_COMMAND} -E copy - ${CMAKE_SOURCE_DIR}/models/gemma-2b-it.fp16.gguf - $/Standalone/musegpt.app/Contents/Resources/gemma-2b-it.fp16.gguf -) - -# Copy model weights to AAX plugin format's output directory -add_custom_command(TARGET musegpt POST_BUILD - COMMAND ${CMAKE_COMMAND} -E copy - 
${CMAKE_SOURCE_DIR}/models/gemma-2b-it.fp16.gguf - $/AAX/musegpt.aaxplugin/Contents/Resources/gemma-2b-it.fp16.gguf -) - -# Copy model weights to AU plugin format's output directory -add_custom_command(TARGET musegpt POST_BUILD - COMMAND ${CMAKE_COMMAND} -E copy - ${CMAKE_SOURCE_DIR}/models/gemma-2b-it.fp16.gguf - $/AU/musegp.component/Contents/Resources/gemma-2b-it.fp16.gguf -) \ No newline at end of file +# Add llama-server as a binary resource and copy files +if(WIN32) + # Windows-specific commands + add_custom_command( + OUTPUT ${CMAKE_BINARY_DIR}/$/llama-server.exe + COMMAND ${CMAKE_COMMAND} -E copy + ${CMAKE_SOURCE_DIR}/build/llama.cpp/bin/$/llama-server.exe + ${CMAKE_BINARY_DIR}/$/llama-server.exe + DEPENDS ${CMAKE_SOURCE_DIR}/build/llama.cpp/bin/$/llama-server.exe + ) + add_custom_target(copy_llama_server ALL DEPENDS ${CMAKE_BINARY_DIR}/$/llama-server.exe) + set_source_files_properties(${CMAKE_BINARY_DIR}/$/llama-server.exe PROPERTIES MACOSX_PACKAGE_LOCATION Resources) + target_sources(musegpt PRIVATE ${CMAKE_BINARY_DIR}/$/llama-server.exe) + + # Copy llama-server to VST plugin format's output directory + add_custom_command(TARGET musegpt POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy + ${CMAKE_BINARY_DIR}/$/llama-server.exe + $/VST3/musegpt.vst3/Contents/Resources/llama-server.exe + ) + + # Copy llama-server to Standalone plugin format's output directory + add_custom_command(TARGET musegpt POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy + ${CMAKE_BINARY_DIR}/$/llama-server.exe + $/musegpt.exe/llama-server.exe + ) + + # Copy model weights to VST plugin format's output directory + add_custom_command(TARGET musegpt POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy + ${CMAKE_SOURCE_DIR}/models/gemma-2b-it.fp16.gguf + $/VST3/musegpt.vst3/Contents/Resources/gemma-2b-it.fp16.gguf + ) + + # Copy model weights to Standalone plugin format's output directory + add_custom_command(TARGET musegpt POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy + 
${CMAKE_SOURCE_DIR}/models/gemma-2b-it.fp16.gguf + $/musegpt.exe/gemma-2b-it.fp16.gguf + ) +else() + # Non-Windows commands + add_custom_command( + OUTPUT ${CMAKE_BINARY_DIR}/llama-server + COMMAND ${CMAKE_COMMAND} -E copy + ${CMAKE_SOURCE_DIR}/build/llama.cpp/bin/llama-server + ${CMAKE_BINARY_DIR}/llama-server + DEPENDS ${CMAKE_SOURCE_DIR}/build/llama.cpp/bin/llama-server + ) + add_custom_target(copy_llama_server ALL DEPENDS ${CMAKE_BINARY_DIR}/llama-server) + set_source_files_properties(${CMAKE_BINARY_DIR}/llama-server PROPERTIES MACOSX_PACKAGE_LOCATION Resources) + target_sources(musegpt PRIVATE ${CMAKE_BINARY_DIR}/llama-server) + + # Copy llama-server to VST plugin format's output directory + add_custom_command(TARGET musegpt POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy + ${CMAKE_BINARY_DIR}/llama-server + $/VST3/musegpt.vst3/Contents/Resources/llama-server + ) + + # Copy llama-server to Standalone plugin format's output directory + add_custom_command(TARGET musegpt POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy + ${CMAKE_BINARY_DIR}/llama-server + $/Standalone/musegpt.app/Contents/Resources/llama-server + ) + + # Copy model weights to VST plugin format's output directory + add_custom_command(TARGET musegpt POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy + ${CMAKE_SOURCE_DIR}/models/gemma-2b-it.fp16.gguf + $/VST3/musegpt.vst3/Contents/Resources/gemma-2b-it.fp16.gguf + ) + + # Copy model weights to Standalone plugin format's output directory + add_custom_command(TARGET musegpt POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy + ${CMAKE_SOURCE_DIR}/models/gemma-2b-it.fp16.gguf + $/Standalone/musegpt.app/Contents/Resources/gemma-2b-it.fp16.gguf + ) + + # Copy model weights to AAX plugin format's output directory + add_custom_command(TARGET musegpt POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy + ${CMAKE_SOURCE_DIR}/models/gemma-2b-it.fp16.gguf + $/AAX/musegpt.aaxplugin/Contents/Resources/gemma-2b-it.fp16.gguf + ) + + # Copy model weights to AU plugin format's output 
directory + add_custom_command(TARGET musegpt POST_BUILD + COMMAND ${CMAKE_COMMAND} -E copy + ${CMAKE_SOURCE_DIR}/models/gemma-2b-it.fp16.gguf + $/AU/musegpt.component/Contents/Resources/gemma-2b-it.fp16.gguf + ) +endif() \ No newline at end of file diff --git a/README.md b/README.md index 254227f..397fc7c 100644 --- a/README.md +++ b/README.md @@ -1,19 +1,16 @@ # [musegpt](https://github.com/greynewell/musegpt) [![GitHub Repo stars](https://img.shields.io/github/stars/greynewell/musegpt)](https://github.com/greynewell/musegpt/stargazers) -[![CMake](https://github.com/greynewell/musegpt/actions/workflows/cmake.yml/badge.svg?branch=main)](https://github.com/greynewell/musegpt/actions/workflows/cmake.yml) [![License: AGPL v3](https://img.shields.io/badge/License-AGPL%20v3-blue.svg)](https://www.gnu.org/licenses/agpl-3.0) [![Platform Support](https://img.shields.io/badge/platform-macOS%20%7C%20Linux-blue)](#supported-platforms) [![C++](https://img.shields.io/badge/c++-17-%2300599C.svg?logo=c%2B%2B&logoColor=white)](https://isocpp.org/) [![JUCE](https://img.shields.io/badge/JUCE-8-8DC63F&logo=juce&logoColor=white)](https://juce.com/) [![llama.cpp](https://img.shields.io/badge/llama.cpp-feff4aa-violet&logoColor=white)](https://github.com/ggerganov/llama.cpp/commit/feff4aa8461da7c432d144c11da4802e41fef3cf) +[![CMake](https://github.com/greynewell/musegpt/actions/workflows/cmake.yml/badge.svg?branch=main)](https://github.com/greynewell/musegpt/actions/workflows/cmake.yml) [![License: AGPL v3](https://img.shields.io/badge/License-AGPL%20v3-blue.svg)](https://github.com/greynewell/musegpt/blob/main/LICENSE) [![Platform Support](https://img.shields.io/badge/platform-Windows%20%7C%20macOS%20%7C%20Linux-blue)](https://musegpt.org/requirements.html) [![C++](https://img.shields.io/badge/c++-17-%2300599C.svg?logo=c%2B%2B&logoColor=white)](https://musegpt.org/requirements.html)
[![JUCE](https://img.shields.io/badge/JUCE-8-8DC63F?logo=juce&logoColor=white)](https://musegpt.org/requirements.html) [![llama.cpp](https://img.shields.io/badge/llama.cpp-feff4aa-violet?logoColor=white)](https://musegpt.org/requirements.html) -Run local Large Language Models (LLMs) in your Digital Audio Workstation (DAW) to create music. +Run local Large Language Models (LLMs) in your Digital Audio Workstation (DAW) to provide inspiration, instructions, and analysis for your music creation. ## Table of Contents - [Features](#features) - [Demo](#demo) -- [Installation](#installation) -- [Getting Started](#getting-started) - [Requirements](#requirements) +- [Installation](#installation) - [Usage](#usage) -- [Supported Platforms](#supported-platforms) -- [Supported Models](#supported-models) - [Architecture](#architecture) - [Contributing](#contributing) - [License](#license) @@ -36,94 +33,19 @@ For more information about plans for upcoming features, check out the [Roadmap o *Click the image above to watch a demo of musegpt in action.* -## Installation - -To install `musegpt`, you can download the latest binaries from [Releases](https://github.com/greynewell/musegpt/releases). - -If you want to build from source, follow these steps: - -1. **Clone the repository:** - - ```bash - git clone --recurse-submodules -j2 https://github.com/greynewell/musegpt.git - cd musegpt - ``` - -2. **Install dependencies:** - - Ensure you have the required dependencies installed. See [Requirements](#requirements) for details. - -3. **Build the project:** - - Run the shell build script: - - ```bash - ./scripts/build/debug.sh - ``` - - or - - ```bash - ./scripts/build/release.sh - ``` - -4. **Install the plugin:** - - CMake will automatically copy the built VST3, AU, or AAX plugin to your DAW's plugin directory. - - - **macOS:** `~/Library/Audio/Plug-Ins/VST3/` - - **Linux:** `~/.vst3/` - -## Getting Started - -After installing musegpt, open your DAW and rescan for new plugins.
Load `musegpt` as a plugin and start interacting with the LLM to enhance your music creation process! - -## System Prompt - -Feel free to experiment with the system prompt to customize the behavior of the LLM. Here's a suggestion to get you started: - -> You are a helpful assistant that lives inside a musician's Digital Audio Workstation. Help them by giving them step-by-step instructions about music—composition, writing, performance, production, and engineering—in a creative and methodical way. - ## Requirements -- **Operating System:** - - macOS 10.11 or later - - Linux (mainstream distributions) -- **DAW Support:** Any DAW that supports VST3 plugins (Ableton Live, FL Studio, Logic Pro, Pro Tools, etc.) -- **Dependencies:** - - [JUCE](https://juce.com/) (Audio application framework) - - [llama.cpp](https://github.com/ggerganov/llama.cpp) (LLM inference library) - - C++17 compatible compiler (e.g., GCC 7+, Clang 5+, MSVC 2017+) - - [CMake](https://cmake.org/) 3.15 or later - -## Usage - -1. **Load the Plugin:** - - In your DAW, add musegpt as a VST3 plugin on a track. +You'll need a C++17 compatible compiler, CMake, and Python 3.10 or later. See [Requirements](https://musegpt.org/requirements.html) for more details on supported Operating Systems, models, DAWs, and more. -2. **Interact with the LLM:** - - Use the plugin's interface to chat with the integrated LLM. You can input MIDI or audio data for analysis (features under development). - -3. **Create Music:** - - Leverage the power of AI to inspire new musical ideas, assist with composition, or generate creative suggestions. - -## Supported Platforms - -musegpt is cross-platform and supports the following operating systems: - -- **macOS:** macOS 10.11 or later -- **Linux:** Mainstream distributions +## Installation -## Supported Models +To install `musegpt`, you can download the latest binaries from [Releases](https://github.com/greynewell/musegpt/releases). 
-musegpt currently supports the following models: +If you want to build from source, follow the [Installation](https://musegpt.org/installation.html) instructions. -- **gemma-2b-it.fp16.gguf** +## Usage -Any model compatible with `llama.cpp` should work with `musegpt`. Feel free to experiment with different models to find the best one for your needs—and raise a pull request! +Please refer to the [Usage](https://musegpt.org/usage.html) section of the documentation. ## Architecture diff --git a/docs/features.md b/docs/features.md index 18f4513..d752495 100644 --- a/docs/features.md +++ b/docs/features.md @@ -1,5 +1,9 @@ # Features [![GitHub Repo stars](https://img.shields.io/github/stars/greynewell/musegpt)](https://github.com/greynewell/musegpt/stargazers) +**musegpt** allows you to run local Large Language Models directly within your DAW, enhancing your music creation process by providing AI-powered assistance. + +Current features include: + - ✅ LLM chat - ✅ VST3 plugin - ✅ MIDI input @@ -9,7 +13,7 @@ - ❌ MIDI generation (Upcoming) - ❌ Audio generation (Upcoming) -**musegpt** allows you to run local Large Language Models directly within your DAW, enhancing your music creation process by providing AI-powered assistance. +To see upcoming features, check out the [GitHub issues](https://github.com/greynewell/musegpt/issues) and [Roadmap on GitHub Projects](https://github.com/greynewell/musegpt/projects/1). --- diff --git a/docs/getting_started.md b/docs/getting_started.md deleted file mode 100644 index f3ed802..0000000 --- a/docs/getting_started.md +++ /dev/null @@ -1,29 +0,0 @@ -# Getting Started [![GitHub Repo stars](https://img.shields.io/github/stars/greynewell/musegpt)](https://github.com/greynewell/musegpt/stargazers) - -After installing **musegpt**, follow these steps to start integrating AI into your music creation process: - -1. **Rescan Plugins in Your DAW:** - - Open your DAW and rescan for new plugins to detect **musegpt**. - -2. 
**Load musegpt Plugin:** - - Add **musegpt** as a VST3 plugin on a track within your DAW. - -3. **Interact with the LLM:** - - Use the plugin's interface to chat with the integrated LLM. You can input MIDI or audio data for analysis (features under development). - -4. **Create Music:** - - Leverage the power of AI to inspire new musical ideas, assist with composition, or generate creative suggestions. - -## System Prompt - -Feel free to experiment with the system prompt to customize the behavior of the LLM. Here's a suggestion to get you started: - -> You are a helpful assistant that lives inside a musician's Digital Audio Workstation. Help them by giving them step-by-step instructions about music—composition, writing, performance, production, and engineering—in a creative and methodical way. - ---- - -*[Back to Home](index.md)* \ No newline at end of file diff --git a/docs/index.md b/docs/index.md index 7ef4f69..5b317ba 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,8 +1,8 @@ # [musegpt](https://github.com/greynewell/musegpt) [![GitHub Repo stars](https://img.shields.io/github/stars/greynewell/musegpt)](https://github.com/greynewell/musegpt/stargazers) -[![CMake](https://github.com/greynewell/musegpt/actions/workflows/cmake.yml/badge.svg?branch=main)](https://github.com/greynewell/musegpt/actions/workflows/cmake.yml) [![License: AGPL v3](https://img.shields.io/badge/License-AGPL%20v3-blue.svg)](https://www.gnu.org/licenses/agpl-3.0) [![Platform Support](https://img.shields.io/badge/platform-macOS%20%7C%20Linux-blue)](supported_platforms.md) [![C++](https://img.shields.io/badge/c++-17-%2300599C.svg?logo=c%2B%2B&logoColor=white)](https://isocpp.org/) [![JUCE](https://img.shields.io/badge/JUCE-8-8DC63F&logo=juce&logoColor=white)](https://juce.com/) [![llama.cpp](https://img.shields.io/badge/llama.cpp-feff4aa-violet&logoColor=white)](https://github.com/ggerganov/llama.cpp/commit/feff4aa8461da7c432d144c11da4802e41fef3cf) 
+[![CMake](https://github.com/greynewell/musegpt/actions/workflows/cmake.yml/badge.svg?branch=main)](https://github.com/greynewell/musegpt/actions/workflows/cmake.yml) [![License: AGPL v3](https://img.shields.io/badge/License-AGPL%20v3-blue.svg)](https://github.com/greynewell/musegpt/blob/main/LICENSE) [![Platform Support](https://img.shields.io/badge/platform-Windows%20%7C%20macOS%20%7C%20Linux-blue)](https://musegpt.org/requirements.html) [![C++](https://img.shields.io/badge/c++-17-%2300599C.svg?logo=c%2B%2B&logoColor=white)](https://musegpt.org/requirements.html) [![JUCE](https://img.shields.io/badge/JUCE-8-8DC63F?logo=juce&logoColor=white)](https://musegpt.org/requirements.html) [![llama.cpp](https://img.shields.io/badge/llama.cpp-feff4aa-violet?logoColor=white)](https://musegpt.org/requirements.html) -Run local Large Language Models (LLMs) in your Digital Audio Workstation (DAW) to create music. +Run local Large Language Models (LLMs) in your Digital Audio Workstation (DAW) to provide inspiration, instructions, and analysis for your music creation. --- @@ -13,12 +13,9 @@ Welcome to **musegpt** documentation! This site will guide you through the featu - [Demonstration](demo.md) - [Features](features.md) - [Installation](installation.md) -- [Getting Started](getting_started.md) - [Usage](usage.md) - [Requirements](requirements.md) -- [Supported Platforms](supported_platforms.md) - [Technical Approach](technical_approach.md) -- [Practical Applications](practical_applications.md) - [Ethics](ethics.md) - [Acknowledgments](acknowledgments.md) diff --git a/docs/installation.md b/docs/installation.md index 6a4db0b..bf6e478 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -7,6 +7,7 @@ You can install **musegpt** by downloading the latest binaries from the [Release If you prefer to build from source, follow these steps: 1.
**Clone the repository:** + For all platforms: ```bash git clone --recurse-submodules -j2 https://github.com/greynewell/musegpt.git @@ -15,11 +16,11 @@ If you prefer to build from source, follow these steps: 2. **Install dependencies:** - Ensure you have the required dependencies installed. See [Requirements](requirements.md) for details. + Ensure you have the required dependencies installed. See [Requirements](https://musegpt.org/requirements.html) for details. 3. **Build the project:** - Run the shell build script: + For Unix-based systems (Linux, macOS): ```bash ./scripts/build/debug.sh @@ -31,12 +32,45 @@ If you prefer to build from source, follow these steps: ./scripts/build/release.sh ``` + For Windows (PowerShell): + + ```powershell + ./scripts/build/debug.ps1 + ``` + + or + + ```powershell + ./scripts/build/release.ps1 + ``` + + **You may need to run the above commands with administrative privileges on Windows.** + + Each build script will also download the relevant model weights for the inference engine. + 4. **Install the plugin:** CMake will automatically copy the built VST3, AU, or AAX plugin to your DAW's plugin directory. Example paths for VST3 are: - **macOS:** `~/Library/Audio/Plug-Ins/VST3/` - **Linux:** `~/.vst3/` + - **Windows:** `%USERPROFILE%\Documents\VST3\` + +5. **Run the plugin:** + + Start your DAW and you should see **musegpt** in your plugin list. For more detailed instructions on how to use **musegpt**, see the [Usage](https://musegpt.org/usage.html) section of the documentation. + +6.
**Clean the project (if needed):** + + For Unix-based systems (Linux, macOS): + ```bash + ./scripts/clean.sh + ``` + + For Windows (PowerShell): + ```powershell + ./scripts/clean.ps1 + ``` --- diff --git a/docs/practical_applications.md b/docs/practical_applications.md deleted file mode 100644 index 123e45b..0000000 --- a/docs/practical_applications.md +++ /dev/null @@ -1,22 +0,0 @@ -# Practical Applications [![GitHub Repo stars](https://img.shields.io/github/stars/greynewell/musegpt)](https://github.com/greynewell/musegpt/stargazers) - -## Inspiration and Instructions - -- **Creative Prompts:** Generate ideas to inspire your music creation. -- **Guidance:** Receive step-by-step instructions for new projects. -- **Mastery:** Get tips for mastering new sounds or genres in your DAW. - -## Lyric Generation & Cowriting - -- **Co-writing:** Collaborate with the AI to generate and refine lyrics. -- **Workflow Efficiency:** Work within your DAW to maintain creative flow. -- **Privacy:** Keep your data local and secure. - -## Experimenting with AI Collaboration - -- **Innovation:** Explore new workflows and techniques with AI assistance. -- **Collaboration:** Discover novel creative possibilities through AI-human collaboration.
- ---- - -*[Back to Home](index.md)* \ No newline at end of file diff --git a/docs/requirements.md b/docs/requirements.md index 02c3970..7a1d9ff 100644 --- a/docs/requirements.md +++ b/docs/requirements.md @@ -1,8 +1,9 @@ -# Requirements [![GitHub Repo stars](https://img.shields.io/github/stars/greynewell/musegpt)](https://github.com/greynewell/musegpt/stargazers) +# Requirements [![GitHub Repo stars](https://img.shields.io/github/stars/greynewell/musegpt?style=social)](https://github.com/greynewell/musegpt/stargazers) ## Operating System - **macOS:** macOS 10.11 or later +- **Windows:** Windows 10 or later - **Linux:** Mainstream distributions ## DAW Support @@ -13,7 +14,11 @@ Any DAW that supports VST3 plugins (e.g., Ableton Live, FL Studio, Logic Pro, Pr - **JUCE:** Audio application framework - **llama.cpp:** LLM inference library -- **Compiler:** C++17 compatible compiler (e.g., GCC 7+, Clang 5+, MSVC 2017+) +- **Compiler:** + - macOS: Clang 6.0 or later + - Windows: Visual Studio 2022 Build Tools for C++ + - Linux: Clang 6.0 or later +- **Python:** 3.10 or later (for model downloading and processing) - **CMake:** Version 3.15 or later ## Supported Models diff --git a/docs/supported_platforms.md b/docs/supported_platforms.md deleted file mode 100644 index 5d6fdb0..0000000 --- a/docs/supported_platforms.md +++ /dev/null @@ -1,14 +0,0 @@ -# Supported Platforms [![GitHub Repo stars](https://img.shields.io/github/stars/greynewell/musegpt)](https://github.com/greynewell/musegpt/stargazers) - -**musegpt** is cross-platform and supports the following operating systems: - -- **macOS:** macOS 10.11 or later -- **Linux:** Mainstream distributions - -## DAW Compatibility - -**musegpt** is compatible with any DAW that supports VST3, Audio Unit or AAX plugins. 
- ---- - -*[Back to Home](index.md)* \ No newline at end of file diff --git a/docs/thank_you.md b/docs/thank_you.md index 0e6fdf1..8cab6e8 100644 --- a/docs/thank_you.md +++ b/docs/thank_you.md @@ -1,6 +1,6 @@ # Thank You! [![GitHub Repo stars](https://img.shields.io/github/stars/greynewell/musegpt)](https://github.com/greynewell/musegpt/stargazers) -We appreciate everyone who has given the repository a star on GitHub! If you're excited to work on the future of human and AI musical collaboration, check out some open issues on the [repo](https://github.com/greynewell/musegpt). +We appreciate [everyone who has given the repository a star on GitHub](https://github.com/greynewell/musegpt/stargazers)! If you're excited to work on the future of human and AI musical collaboration, check out some open issues on the [repo](https://github.com/greynewell/musegpt). If you enjoyed this project, we'd love your feedback on [X](https://x.com/greynewell) or [LinkedIn](https://www.linkedin.com/in/greynewell/)! Feel free to share this documentation or the [GitHub repository](https://github.com/greynewell/musegpt) with your friends, forums, and on social media. diff --git a/docs/usage.md b/docs/usage.md index f836b82..c297a6c 100644 --- a/docs/usage.md +++ b/docs/usage.md @@ -1,6 +1,30 @@ # Usage [![GitHub Repo stars](https://img.shields.io/github/stars/greynewell/musegpt)](https://github.com/greynewell/musegpt/stargazers) -**musegpt** enhances your music production workflow by integrating AI assistance directly into your DAW. +After [installing **musegpt**](https://musegpt.org/installation.html), follow these steps to start integrating AI into your music creation process: + +## Getting Started + +1. **Rescan Plugins in Your DAW:** + + Open your DAW and rescan for new plugins to detect **musegpt**. + +2. **Load musegpt Plugin:** + + Add **musegpt** as a VST3 plugin on a track within your DAW. + +3. 
**Interact with the LLM:** + + Use the plugin's interface to chat with the integrated LLM. You can input MIDI or audio data for analysis (features under development). + +4. **Create Music:** + + Leverage the power of AI to inspire new musical ideas, assist with composition, or generate creative suggestions. + +## System Prompt + +Feel free to experiment with the system prompt to customize the behavior of the LLM. Here's a suggestion to get you started: + +> You are a helpful assistant that lives inside a musician's Digital Audio Workstation. Help them by giving them step-by-step instructions about music—composition, writing, performance, production, and engineering—in a creative and methodical way. ## Interacting with the LLM diff --git a/llama.cpp b/llama.cpp index feff4aa..116efee 160000 --- a/llama.cpp +++ b/llama.cpp @@ -1 +1 @@ -Subproject commit feff4aa8461da7c432d144c11da4802e41fef3cf +Subproject commit 116efee0eef09d8c3c4c60b52fa01b56ddeb432c diff --git a/scripts/build/debug.ps1 b/scripts/build/debug.ps1 new file mode 100644 index 0000000..0378a16 --- /dev/null +++ b/scripts/build/debug.ps1 @@ -0,0 +1,25 @@ +# Wipe out build output from this project +Remove-Item -Path "$env:USERPROFILE\Documents\VST3\musegpt.vst3" -Recurse -Force -ErrorAction SilentlyContinue + +# Create build output directories +New-Item -Path "build\debug" -ItemType Directory -Force + +# Download models +& "$PSScriptRoot\..\models.ps1" + +# Install webview2 +Register-PackageSource -provider NuGet -name nugetRepository -location https://www.nuget.org/api/v2 -Force +Install-Package Microsoft.Web.WebView2 -Scope CurrentUser -RequiredVersion 1.0.1901.177 -Source nugetRepository -Force + +# build llama.cpp server + +Push-Location llama.cpp +cmake -S . -G "Visual Studio 17 2022" -B ..\build\llama.cpp +cmake --build ..\build\llama.cpp -j $env:NUMBER_OF_PROCESSORS --target llama-server +Pop-Location + +# build main project +Push-Location build +cmake -S .. 
-B debug +cmake --build debug --config Debug -j $env:NUMBER_OF_PROCESSORS +Pop-Location \ No newline at end of file diff --git a/scripts/build/release.ps1 b/scripts/build/release.ps1 new file mode 100644 index 0000000..6bb655d --- /dev/null +++ b/scripts/build/release.ps1 @@ -0,0 +1,24 @@ +# Wipe out build output from this project +Remove-Item -Path "$env:USERPROFILE\Documents\VST3\musegpt.vst3" -Recurse -Force -ErrorAction SilentlyContinue + +# Create build output directories +New-Item -Path "build\release" -ItemType Directory -Force + +# Download models +& "$PSScriptRoot\..\models.ps1" + +# Install webview2 +Register-PackageSource -provider NuGet -name nugetRepository -location https://www.nuget.org/api/v2 -Force +Install-Package Microsoft.Web.WebView2 -Scope CurrentUser -RequiredVersion 1.0.1901.177 -Source nugetRepository -Force + +# build llama.cpp server +Push-Location llama.cpp +cmake -S . -G "Visual Studio 17 2022" -B ..\build\llama.cpp +cmake --build ..\build\llama.cpp -j $env:NUMBER_OF_PROCESSORS --target llama-server --config Release +Pop-Location + +# build main project +Push-Location build +cmake -S .. -B release +cmake --build release --config Release -j $env:NUMBER_OF_PROCESSORS +Pop-Location \ No newline at end of file diff --git a/scripts/build/release.sh b/scripts/build/release.sh index 9471b24..85c7014 100755 --- a/scripts/build/release.sh +++ b/scripts/build/release.sh @@ -12,11 +12,11 @@ scripts/models.sh # build llama.cpp server cd llama.cpp cmake -S . -B ../build/llama.cpp -cmake --build ../build/llama.cpp -j $(sysctl -n hw.physicalcpu) --target llama-server +cmake --build ../build/llama.cpp -j 4 --target llama-server --config Release cd .. # build main project cd build cmake -S .. -B release -cmake --build release --config Release -j $(sysctl -n hw.physicalcpu) +cmake --build release --config Release -j 4 cd .. 
\ No newline at end of file diff --git a/scripts/clean.ps1 b/scripts/clean.ps1 new file mode 100644 index 0000000..a662d25 --- /dev/null +++ b/scripts/clean.ps1 @@ -0,0 +1,12 @@ +# Clean script to wipe out all transient state + +# Remove build directory +Remove-Item -Path "build" -Recurse -Force -ErrorAction SilentlyContinue + +# Remove models directory +Remove-Item -Path "models" -Recurse -Force -ErrorAction SilentlyContinue + +# Remove .env directory +Remove-Item -Path ".env" -Recurse -Force -ErrorAction SilentlyContinue + +Write-Host "Cleanup completed." \ No newline at end of file diff --git a/scripts/models.ps1 b/scripts/models.ps1 new file mode 100644 index 0000000..f2cc93a --- /dev/null +++ b/scripts/models.ps1 @@ -0,0 +1,16 @@ +# Create models directory if it doesn't exist +New-Item -ItemType Directory -Force -Path .\models + +# Setup virtual environment and dependencies +# Remove .env directory +Remove-Item -Path ".env" -Recurse -Force -ErrorAction SilentlyContinue +python -m venv .env +.\.env\Scripts\Activate.ps1 +pip install -r requirements.txt + +# Download GGUF models +$MODEL_REPO="MaziyarPanahi/gemma-2b-it-GGUF" +$MODEL_FILE="gemma-2b-it.fp16.gguf" +$MODEL_DIR=".\models" + +huggingface-cli download $MODEL_REPO $MODEL_FILE --local-dir $MODEL_DIR \ No newline at end of file diff --git a/scripts/run/debug.ps1 b/scripts/run/debug.ps1 new file mode 100644 index 0000000..29c4220 --- /dev/null +++ b/scripts/run/debug.ps1 @@ -0,0 +1 @@ +.\build\debug\musegpt_artefacts\Debug\Standalone\musegpt.exe \ No newline at end of file diff --git a/scripts/run/release.ps1 b/scripts/run/release.ps1 new file mode 100644 index 0000000..010fd78 --- /dev/null +++ b/scripts/run/release.ps1 @@ -0,0 +1 @@ +.\build\release\musegpt_artefacts\Release\Standalone\musegpt.exe \ No newline at end of file