Handle llama errors

This commit is contained in:
Tanner Collin 2024-09-26 17:50:03 +00:00
parent 7556607f89
commit 20a77a5838

16
main.py
View File

@@ -13,9 +13,15 @@ NOTES_DIR = '/home/tanner/notes-git/notes'
def llama(prompt): def llama(prompt):
data = dict(model='llama3.1', prompt=prompt, stream=False) data = dict(model='llama3.1', prompt=prompt, stream=False)
r = requests.post(LLAMA_URL, json=data, timeout=10) try:
r = r.json() r = requests.post(LLAMA_URL, json=data, timeout=10)
return r['response'] r.raise_for_status()
r = r.json()
return r['response']
except BaseException as e:
logging.error('Problem with llama: {} - {}'.format(e.__class__.__name__, str(e)))
return False
def git_diff(): def git_diff():
result = subprocess.run(['git', 'diff', '--cached', '-U0'], stdout=subprocess.PIPE, cwd=NOTES_DIR) result = subprocess.run(['git', 'diff', '--cached', '-U0'], stdout=subprocess.PIPE, cwd=NOTES_DIR)
@@ -49,6 +55,10 @@ Write in imperitive tense.
message = llama(prompt) message = llama(prompt)
if not message:
logging.info(' Failed to generate')
return False
logging.info('Generated message: %s', message) logging.info('Generated message: %s', message)
if '.md' in message.lower(): if '.md' in message.lower():