name: Create HuggingFace Model Repository

# on:
#   workflow_dispatch:
#     inputs:
#       model_name:
#         description: "Name of the model to create (will be used in repo name and files)"
#         required: true
#         type: string
#       prompt_template:
#         description: "Prompt template for the model"
#         required: true
#         type: string
#       stop_tokens:
#         description: "Stop tokens for the model (comma-separated)"
#         required: true
#         type: string
#       engine:
#         description: "Engine to run the model (e.g., llama-cpp)"
#         required: true
#         type: string
#
# env:
#   USER_NAME: cortexso
#   MODEL_NAME: ${{ inputs.model_name }}
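#
# Example manual trigger once the workflow_dispatch block above is re-enabled
# (the workflow file name is hypothetical; values are illustrative, taken from
# the test defaults below):
#   gh workflow run create-hf-model-repo.yml \
#     -f model_name=test-model \
#     -f prompt_template='<|im_start|>user\n{prompt}<|im_end|>\n<|im_start|>assistant' \
#     -f stop_tokens='<|im_end|>' \
#     -f engine=llama-cpp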

on:
  push:
    branches:
      - add/create_modelyml_workflow

env:
  USER_NAME: cortexso
  MODEL_NAME: "test-model" # Default test value
  PROMPT_TEMPLATE: "<|im_start|>system\n{system_message}<|im_end|>\n<|im_start|>user\n{prompt}<|im_end|>\n<|im_start|>assistant" # Default test value
  STOP_TOKENS: "<|im_end|>" # Default test value
  ENGINE: "llama-cpp" # Default test value

jobs:
  create-repo:
    runs-on: ubuntu-20-04-gguf
    timeout-minutes: 7200
    steps:
      - name: Checkout
        uses: actions/checkout@v4

      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.10'

      - name: Cache Python packages
        uses: actions/cache@v4
        with:
          path: |
            ~/.cache/pip
            ~/.local/share/pip
            .venv
          key: ${{ runner.os }}-pip-${{ github.sha }}
          restore-keys: |
            ${{ runner.os }}-pip-
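      # Note: keying on github.sha writes a fresh cache entry per commit;
      # restore-keys falls back to the newest cache whose key starts with
      # "<os>-pip-" when no exact match exists.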
      - name: Install dependencies
        run: |
          pip install huggingface_hub PyYAML
          git lfs install

      - name: Create YAML files
        run: |
          python3 - << EOF
          import yaml
          import os

          # Function to create a custom string representer
          def quoted_presenter(dumper, data):
              return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='"')

          # Register the custom presenter
          yaml.add_representer(str, quoted_presenter)
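          # Because the presenter is registered for every str, all string scalars
          # emitted by yaml.dump below come out double-quoted, e.g. id: "test-model".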

          # Process stop tokens
          stop_tokens = os.environ['STOP_TOKENS'].split(',')
          stop_tokens = [token.strip() for token in stop_tokens if token.strip()]

          # Create model.yml content
          model_content = {
              'id': os.environ['MODEL_NAME'],
              'model': os.environ['MODEL_NAME'],
              'name': os.environ['MODEL_NAME'],
              'version': 1,
              'stop': stop_tokens,
              'stream': True,
              'top_p': 0.9,
              'temperature': 0.7,
              'frequency_penalty': 0,
              'presence_penalty': 0,
              'max_tokens': 4096,
              'seed': -1,
              'dynatemp_range': 0,
              'dynatemp_exponent': 1,
              'top_k': 40,
              'min_p': 0.05,
              'tfs_z': 1,
              'typ_p': 1,
              'repeat_last_n': 64,
              'repeat_penalty': 1,
              'mirostat': False,
              'mirostat_tau': 5,
              'mirostat_eta': 0.100000001,
              'penalize_nl': False,
              'ignore_eos': False,
              'n_probs': 0,
              'min_keep': 0,
              'engine': os.environ['ENGINE'],
              'prompt_template': os.environ['PROMPT_TEMPLATE'],
              'ctx_len': 4096,
              'ngl': 34
          }
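          # ctx_len is the context window size; ngl is the number of model layers
          # offloaded to the GPU (llama.cpp-style load parameters).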

          # Create the model.yml file with proper sections and comments
          with open('model.yml', 'w') as f:
              f.write("# BEGIN GENERAL GGUF METADATA\n")
              yaml.dump({k: model_content[k] for k in ['id', 'model', 'name', 'version']},
                        f, default_flow_style=False, sort_keys=False)
              f.write("\n# END GENERAL GGUF METADATA\n\n")

              f.write("# BEGIN INFERENCE PARAMETERS\n# BEGIN REQUIRED\n")
              yaml.dump({'stop': model_content['stop']}, f, default_flow_style=False)
              f.write("# END REQUIRED\n\n")

              f.write("# BEGIN OPTIONAL\n")
              optional_params = dict((k, model_content[k]) for k in [
                  'stream', 'top_p', 'temperature', 'frequency_penalty',
                  'presence_penalty', 'max_tokens', 'seed', 'dynatemp_range',
                  'dynatemp_exponent', 'top_k', 'min_p', 'tfs_z', 'typ_p',
                  'repeat_last_n', 'repeat_penalty', 'mirostat', 'mirostat_tau',
                  'mirostat_eta', 'penalize_nl', 'ignore_eos', 'n_probs', 'min_keep'
              ])
              yaml.dump(optional_params, f, default_flow_style=False)
              f.write("# END OPTIONAL\n")
              f.write("# END INFERENCE PARAMETERS\n\n")

              f.write("# BEGIN MODEL LOAD PARAMETERS\n# BEGIN REQUIRED\n")
              required_params = dict((k, model_content[k]) for k in [
                  'engine', 'prompt_template', 'ctx_len', 'ngl'
              ])
              yaml.dump(required_params, f, default_flow_style=False)
              f.write("# END REQUIRED\n")
              f.write("# END MODEL LOAD PARAMETERS\n")

          # Create metadata.yml content
          metadata_content = {
              'version': 1,
              'name': os.environ['MODEL_NAME'],
              'default': '8b-gguf-q4-km'
          }

          with open('metadata.yml', 'w') as f:
              yaml.dump(metadata_content, f, default_flow_style=False)
          EOF
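
      # The script above writes model.yml in three commented sections
      # (GENERAL GGUF METADATA, INFERENCE PARAMETERS with REQUIRED/OPTIONAL
      # blocks, MODEL LOAD PARAMETERS) plus a small metadata.yml beside it.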
      - name: Create HuggingFace Repository and Upload Files
        env:
          HF_TOKEN: ${{ secrets.HUGGINGFACE_TOKEN_WRITE }}
        run: |
          python3 - << EOF
          from huggingface_hub import HfApi, create_repo
          import os

          # Initialize the Hugging Face API
          api = HfApi(token=os.environ['HF_TOKEN'])

          # Create the repository
          repo_id = f"${{ env.USER_NAME }}/${{ env.MODEL_NAME }}"
          try:
              create_repo(repo_id, private=False, token=os.environ['HF_TOKEN'])
              print(f"Created repository: {repo_id}")
          except Exception as e:
              print(f"Repository might already exist or error occurred: {e}")

          # Upload the files
          api.upload_file(
              path_or_fileobj="model.yml",
              path_in_repo="model.yml",
              repo_id=repo_id,
              token=os.environ['HF_TOKEN']
          )
          api.upload_file(
              path_or_fileobj="metadata.yml",
              path_in_repo="metadata.yml",
              repo_id=repo_id,
              token=os.environ['HF_TOKEN']
          )
          print("Files uploaded successfully")
          EOF

      - name: Cleanup
        run: |
          rm -f model.yml metadata.yml