---
# model/tokenizer
# NOTE: both values below are placeholders and parse as null until replaced.
model_name:  # REPLACE WITH LLAMA MODEL NAME
tokenizer_name:  # REPLACE WITH LLAMA MODEL NAME

# generation settings
max_new_tokens: 512
temperature: 0.001

# prompt sent to the model, verbatim (literal block scalar preserves newlines)
prompt: |
  #this code prints a string reversed
  my_string = "hello how are you"
  print(len(my_string))
  My code above does not work. Can you help me?