added option --run to launch.py.
b4rtaz committed Oct 14, 2024
1 parent 4587f55 commit d10699f
Showing 1 changed file with 10 additions and 3 deletions.
launch.py (13 changes: 10 additions & 3 deletions)
@@ -94,6 +94,11 @@ def writeRunFile(modelName: str, command: str):
 
 def printUsage():
     print('Usage: python download-model.py <model>')
+    print()
+    print('Options:')
+    print(' <model> The name of the model to download')
+    print(' --run Run the model after download')
+    print()
     print('Available models:')
     for model in MODELS:
         print(f' {model}')
@@ -109,6 +114,7 @@ def printUsage():
 if modelName not in MODELS:
     print(f'Model is not supported: {modelName}')
     exit(1)
+runAfterDownload = sys.argv.count('--run') > 0
 
 model = MODELS[modelName]
 (modelPath, tokenizerPath) = download(modelName, model)
@@ -123,15 +129,16 @@ def printUsage():
 print('To run Distributed Llama you need to execute:')
 print('--- copy start ---')
 print()
-print(command)
+print('\033[96m' + command + '\033[0m')
 print()
 print('--- copy end -----')
 
 runFilePath = writeRunFile(modelName, command)
 print(f'🌻 Created {runFilePath} script to easy run')
 
-result = input('❓ Do you want to run Distributed Llama? ("Y" if yes): ')
-if (result.upper() == 'Y'):
+if (not runAfterDownload):
+    runAfterDownload = input('❓ Do you want to run Distributed Llama? ("Y" if yes): ').lower() == 'y'
+if (runAfterDownload):
     if (not os.path.isfile('dllama')):
         os.system('make dllama')
     os.system(command)
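
The new flag is detected with a plain sys.argv.count('--run') > 0 check rather than an argument parser, and the command is now echoed in bright cyan using the ANSI escape sequences \033[96m and \033[0m. Below is a minimal, self-contained sketch of the same pattern; the command string is a hypothetical placeholder, not the dllama invocation that launch.py actually builds from the downloaded model and tokenizer.

import os
import sys

# Stand-in for the command string that launch.py assembles from the downloaded
# model and tokenizer paths (hypothetical value, not the real dllama invocation).
command = 'echo Distributed Llama would run here'

# Print the command in bright cyan (\033[96m) and reset the terminal color (\033[0m),
# as the updated launch.py does.
print('\033[96m' + command + '\033[0m')

# --run anywhere on the command line enables running without asking.
runAfterDownload = sys.argv.count('--run') > 0

# Without --run, fall back to the interactive prompt, as before this commit.
if not runAfterDownload:
    answer = input('Do you want to run Distributed Llama? ("Y" if yes): ')
    runAfterDownload = answer.lower() == 'y'

if runAfterDownload:
    os.system(command)

Invoked with --run on the command line, the sketch skips the prompt entirely; without the flag it falls back to the interactive question, matching the behavior the commit introduces.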
