Skip to content

Commit

Permalink
Merge pull request #71 from warpy-ai/15-create-an-installation-script
Browse files Browse the repository at this point in the history
15 create an installation script
  • Loading branch information
jucasoliveira authored Mar 11, 2024
2 parents 86da0c8 + d4c9972 commit ba1bf90
Show file tree
Hide file tree
Showing 8 changed files with 223 additions and 35 deletions.
35 changes: 15 additions & 20 deletions .github/workflows/pr.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,26 +5,21 @@ on:
branches:
- main

env:
CARGO_TERM_COLOR: always

jobs:
rust_test:
build_and_test:
name: Rust project - latest
runs-on: ubuntu-latest

strategy:
matrix:
toolchain:
- stable
- beta
- nightly
steps:
- name: Checkout code
uses: actions/checkout@v2

- name: Install Rust
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: nightly
override: true

- name: List project
run: ls -a

- name: Run Rust tests
run: cargo test --all

- name: Run Clean
run: cargo clean
- uses: actions/checkout@v3
- run: rustup update ${{ matrix.toolchain }} && rustup default ${{ matrix.toolchain }}
- run: cargo build --verbose
- run: cargo test --verbose
2 changes: 1 addition & 1 deletion .github/workflows/release-pkg.sh
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ else
cp "target/$TARGET/release/$BIN_NAME" "$DIST_DIR/"
cd "$DIST_DIR"
ARCHIVE_NAME="${BIN_NAME}-${VERSION}-${TARGET}.tar.gz"
tar -czf "$ARCHIVE_NAME" "$BIN_NAME"
tar -czf "$ARCHIVE_NAME" *
fi

echo "Packaging completed: $DIST_DIR"
Expand Down
4 changes: 4 additions & 0 deletions .github/workflows/release.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,10 @@ permissions:
contents: write
discussions: write

env:
CARGO_TERM_COLOR: always


jobs:
standard:
name: Build-and-deploy
Expand Down
2 changes: 2 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ name = "tgs"
version = "0.1.0"
authors = ["Lucas Oliveira <[email protected]>"]
edition = "2021"
build = "build.rs"


# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
Expand All @@ -37,3 +38,4 @@ regex = "1.5.5"
term = "0.7.0"
duct = "0.13.6"
tokio = { version = "1", features = ["full"] }
fs_extra = "1.3"
75 changes: 75 additions & 0 deletions build.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
// build.rs
use std::env;
use std::fs;
use std::io;
use std::path::Path;

/// Build script: copies the fine-tuned model directory and the Python
/// inference script into `target/<profile>/` so the compiled binary can
/// find them next to itself at runtime.
fn main() {
    // Build-script stdout is only interpreted by cargo when prefixed with
    // `cargo:`; plain lines are swallowed unless building with `-vv`.
    println!("cargo:warning=CWD is {:?}", env::current_dir().unwrap());
    println!(
        "cargo:warning=CARGO_MANIFEST_DIR is {:?}",
        env::var("CARGO_MANIFEST_DIR").unwrap()
    );

    let output_path = get_output_path();

    println!("cargo:warning=Calculated build path: {:?}", output_path);

    // Source directory holding the model assets and the inference script.
    let asset_dir =
        Path::new(&env::var("CARGO_MANIFEST_DIR").unwrap()).join("crates/tgs_t5_finetunned/");

    if let Ok(context) = env::var("CUSTOM_BUILD_CONTEXT") {
        if context == "run" {
            // Special handling for `cargo run`
            println!("cargo:warning=Handling for cargo run");
        }
    }

    // Sources (under the crate) and destinations (under target/<profile>/).
    let model_src = asset_dir.join("model");
    let script_src = asset_dir.join("inference_model.py");

    let out_dir = output_path.expect("Failed to locate target/<profile> directory");
    let model_out = out_dir.join("model");
    let script_out = out_dir.join("inference_model.py");

    println!(
        "cargo:warning=Attempting to copy from: {}",
        script_src.display()
    );

    // Re-run this script only when the shipped assets change,
    // not on every source edit.
    println!("cargo:rerun-if-changed={}", script_src.display());
    println!("cargo:rerun-if-changed={}", model_src.display());

    // Copy inference_model.py next to the binary; include the paths in the
    // panic message so a failing CI build is diagnosable.
    fs::copy(&script_src, &script_out).unwrap_or_else(|e| {
        panic!(
            "Failed to copy {} to {}: {}",
            script_src.display(),
            script_out.display(),
            e
        )
    });

    // Copy the model directory recursively.
    copy_dir_all(&model_src, &model_out).unwrap_or_else(|e| {
        panic!(
            "Failed to copy model directory {}: {}",
            model_src.display(),
            e
        )
    });
}

/// Locates the `target/<profile>/` directory by walking up from `OUT_DIR`.
///
/// `OUT_DIR` looks like `<target>/<profile>/build/<pkg>/out`; the first
/// strict ancestor whose final component equals the current `PROFILE`
/// (e.g. `debug` or `release`) is the directory we want.
fn get_output_path() -> Result<std::path::PathBuf, Box<dyn std::error::Error>> {
    let out_dir = std::path::PathBuf::from(std::env::var("OUT_DIR")?);
    let profile = env::var("PROFILE").unwrap();

    // `ancestors()` yields the path itself first; skip it so only strict
    // ancestors are considered, matching a parent-by-parent walk.
    let profile_dir = out_dir
        .ancestors()
        .skip(1)
        .find(|ancestor| ancestor.ends_with(&profile))
        .ok_or("not found")?;

    Ok(profile_dir.to_path_buf())
}

/// Recursively copies the contents of `src` into `dst`, creating `dst`
/// (and any missing parents) first. Directories are descended into;
/// every other entry is copied with `fs::copy`.
fn copy_dir_all(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> io::Result<()> {
    let (src, dst) = (src.as_ref(), dst.as_ref());
    fs::create_dir_all(dst)?;
    for entry in fs::read_dir(src)? {
        let entry = entry?;
        let target = dst.join(entry.file_name());
        if entry.file_type()?.is_dir() {
            copy_dir_all(entry.path(), target)?;
        } else {
            fs::copy(entry.path(), target)?;
        }
    }
    Ok(())
}
10 changes: 6 additions & 4 deletions crates/tgs_t5_finetunned/inference_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,13 +3,14 @@
from transformers import T5ForConditionalGeneration, T5Tokenizer


def load_model():
def load_model(relative_model_path=None):
# Get the directory where the script is located
base_path = os.path.dirname(os.path.abspath(__file__))

# The model directory is in the same parent directory as the script
# Adjust this to the correct relative path
relative_model_path = "crates/tgs_t5_finetunned/model"
if relative_model_path is None:
relative_model_path = "crates/tgs_t5_finetunned/model"

# Construct the absolute path to the model directory
model_path = os.path.join(base_path, relative_model_path)
Expand All @@ -31,8 +32,9 @@ def load_model():
return model, tokenizer


def generate_answer(input_text, max_length=50):
model, tokenizer = load_model() # Load the model and tokenizer
def generate_answer(input_text, relative_model_path=None, max_length=50):
# Load the model and tokenizer
model, tokenizer = load_model(relative_model_path)

# Ensure the model is in evaluation mode
model.eval()
Expand Down
51 changes: 41 additions & 10 deletions crates/tgs_t5_finetunned/src/from_py.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
use dialoguer::{theme::ColorfulTheme, Select};
use pyo3::{prelude::*, types::PyModule};
use std::path::PathBuf;
use std::{env, fs};
use tgs_colors::custom;
use tgs_loader::LoadingIndicator;
Expand Down Expand Up @@ -31,28 +32,58 @@ fn call_dialoger(result: String) -> String {
}
}

/// Resolves the location of `inference_model.py`.
///
/// Looks next to the running executable first (the deployed layout),
/// then falls back to the in-repo development path under
/// `CARGO_MANIFEST_DIR` (or `.` when that variable is unset).
fn find_inference_model() -> Result<PathBuf, String> {
    // Deployed layout: the script sits beside the binary.
    let mut candidate = std::env::current_exe()
        .map_err(|e| format!("Failed to get current executable path: {}", e))?;
    candidate.pop(); // strip the executable name, leaving its directory
    candidate.push("inference_model.py");
    if candidate.exists() {
        return Ok(candidate);
    }

    // Development layout: resolve relative to the crate manifest.
    let manifest_dir = env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".into());
    let dev_candidate = PathBuf::from(manifest_dir)
        .join("crates/tgs_t5_finetunned")
        .join("inference_model.py");
    if dev_candidate.exists() {
        return Ok(dev_candidate);
    }

    Err("Failed to find inference_model.py in any known location.".into())
}

/// Returns the directory holding the model files, relative to the
/// process working directory.
///
/// Debug builds resolve against the repository layout; release builds
/// expect the model to sit next to the installed binary.
fn get_model_base_path() -> String {
    let path = if cfg!(debug_assertions) {
        // Non-built (development) environment
        "crates/tgs_t5_finetunned/model"
    } else {
        // Built (production) environment; adjust if the production
        // directory structure changes.
        "model"
    };
    path.to_string()
}

pub fn execute(input_text: &str) -> PyResult<String> {
let loader = LoadingIndicator::new(custom::DARK_WHITE);
pyo3::prepare_freethreaded_python();
// Construct the absolute path to the Python script
let mut script_path = env::current_exe()?;
// Navigate up to the project root directory
for _ in 0..3 {
script_path.pop();
}
// Now script_path should point to the project root
script_path.push("crates/tgs_t5_finetunned/inference_model.py"); // Navigate to the script

let executable_path = find_inference_model().expect("Failed to find inference_model.py");
let base_path = get_model_base_path(); // Get the base path

loader.start(input_text);

Python::with_gil(|py| {
let code = fs::read_to_string(script_path)?;
let code = fs::read_to_string(&executable_path)?;

let module = PyModule::from_code(py, &code, "inference_model.py", "inference_model")?;

let result: PyResult<String> = module
.getattr("generate_answer")?
.call1((input_text,))?
.call1((input_text, base_path))?
.extract();

loader.stop();
Ok(call_dialoger(result?))
})
Expand Down
79 changes: 79 additions & 0 deletions install.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
#!/bin/bash
#
# Installer for tgs: downloads a release tarball from GitHub and places
# the `tgs` binary in INSTALL_DIR.
#
# Usage: install.sh [version]
#   version  optional release tag (e.g. v0.1.0); defaults to the latest.

# Ensure the script fails on error, unset variables, and pipe failures
set -euo pipefail

# Define the GitHub repository
REPO="warpy-ai/tgs"

# Check if a version argument was provided
if [ "$#" -eq 1 ]; then
    VERSION="$1"
    echo "User specified version: $VERSION"
else
    # Fetch the latest release tag from the GitHub API
    VERSION=$(curl -fs "https://api.github.com/repos/$REPO/releases/latest" | grep '"tag_name":' | sed -E 's/.*"([^"]+)".*/\1/')
    if [ -z "$VERSION" ]; then
        echo "Error: could not determine the latest release of $REPO" >&2
        exit 1
    fi
    echo "No version specified, using latest: $VERSION"
fi

# Define variables
INSTALL_DIR="/usr/local/bin"
TMP_DIR=$(mktemp -d)

# Always remove the temporary directory, even when a step fails.
trap 'rm -rf "$TMP_DIR"' EXIT

# Function to identify the OS and architecture, then construct the download URL
set_download_url() {
    OS=$(uname -s)
    ARCH=$(uname -m)
    BASE_URL="https://github.com/$REPO/releases/download/$VERSION"

    case "$OS" in
    "Darwin")
        case "$ARCH" in
        "arm64")
            # Apple Silicon
            FILE_NAME="tgs-${VERSION}-aarch64-apple-darwin.tar.gz"
            ;;
        "x86_64")
            # Intel Mac
            FILE_NAME="tgs-${VERSION}-x86_64-apple-darwin.tar.gz"
            ;;
        *)
            echo "Unsupported architecture: $ARCH" >&2
            exit 1
            ;;
        esac
        ;;
    "Linux")
        # Assuming x86_64 for Linux, adjust if supporting other architectures
        FILE_NAME="tgs-${VERSION}-x86_64-unknown-linux-gnu.tar.gz"
        ;;
    *)
        echo "Unsupported operating system: $OS" >&2
        exit 1
        ;;
    esac

    BIN_URL="${BASE_URL}/${FILE_NAME}"
}

# Download and install
download_and_install() {
    echo "Downloading $BIN_URL"
    # -f: fail on HTTP errors (e.g. a 404 for an unknown version) instead of
    # saving the error page as the tarball.
    curl -fL "$BIN_URL" -o "$TMP_DIR/build.tar.gz"

    echo "Extracting..."
    tar -xzvf "$TMP_DIR/build.tar.gz" -C "$TMP_DIR"

    echo "Installing..."
    # Fail with a clear message instead of a bare `mv` permission error.
    if [ ! -w "$INSTALL_DIR" ]; then
        echo "Error: $INSTALL_DIR is not writable. Re-run with sudo." >&2
        exit 1
    fi
    # Assuming the binary name is 'tgs', adjust if necessary
    mv "$TMP_DIR/tgs" "$INSTALL_DIR"

    echo "Installation completed successfully."
}

# Main
set_download_url
download_and_install

0 comments on commit ba1bf90

Please sign in to comment.