{
  "model": "string",
  "prompt": "string",
  "max_tokens": 0,
  "temperature": 0,
  "top_p": 0,
  "n": 0,
  "stream": true,
  "stop": "string",
  "suffix": "string",
  "echo": true
}