Use generic commit message when llama is offline

commit 587bc67416 (parent a88fbe379c)
Author: Tanner Collin
Date: 2024-11-30 23:22:09 +00:00

main.py (19 lines changed)

@@ -11,10 +11,21 @@ LLAMA_URL = 'http://10.55.0.105:11434/api/generate'
 NOTES_DIR = '/home/tanner/notes-git/notes'
 #NOTES_DIR = '/home/tanner/'
 
+def controller_message(message):
+    payload = dict(home=message)
+    logging.info('Controller message: %s', message)
+
+    r = requests.post('https://tbot.tannercollin.com/message', data=payload, timeout=10)
+    if r.status_code == 200:
+        return True
+    else:
+        logging.exception('Unable to communicate with controller! Message: ' + message)
+        return False
+
 def llama(prompt):
     data = dict(model='llama3.1', prompt=prompt, stream=False)
     try:
-        r = requests.post(LLAMA_URL, json=data, timeout=10)
+        r = requests.post(LLAMA_URL, json=data, timeout=20)
         r.raise_for_status()
         r = r.json()
         return r['response']
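Note: this hunk cuts off before llama()'s exception handler. For the generic-message fallback in the next hunk to trigger, the function presumably returns a falsy value when the request fails; the sketch below shows that assumed behaviour and is not part of this diff.

import logging
import requests

LLAMA_URL = 'http://10.55.0.105:11434/api/generate'

def llama(prompt):
    data = dict(model='llama3.1', prompt=prompt, stream=False)
    try:
        r = requests.post(LLAMA_URL, json=data, timeout=20)
        r.raise_for_status()
        return r.json()['response']
    except requests.exceptions.RequestException:
        # Assumed behaviour: log the failure and return None so main() can
        # fall back to the generic commit message.
        logging.exception('Unable to reach llama at %s', LLAMA_URL)
        return None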
@@ -89,8 +100,10 @@ def main():
         if message:
             break
     else:  # for loop never broke
-        logging.info('Unable to generate acceptable message, exiting.')
-        exit(1)
+        msg = 'Unable to generate acceptable diff message.'
+        controller_message(msg)
+        logging.info(msg)
+        message = 'Auto-commit, can\'t generate message'
 
     if DEBUG:
         logging.info('Running in debug, not actually committing.')
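The fallback above relies on Python's for/else: the else block runs only when the loop finishes without hitting break, i.e. when no attempt produced an acceptable message. A standalone illustration with hypothetical names:

def try_generate():
    # Stand-in for llama(); always fails here to exercise the fallback path.
    return None

for attempt in range(3):
    message = try_generate()
    if message:
        break  # success: the else block below is skipped
else:
    # Runs only when the loop completed without break, i.e. every attempt failed.
    message = 'Auto-commit, can\'t generate message'

print(message)  # -> Auto-commit, can't generate message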