
communex.module.example.openai

from communex.module import Module, endpoint

# prompt: str = 'sup?',
# model: str = 'gpt-3.5-turbo',
# presence_penalty: float = 0.0,
# frequency_penalty: float = 0.0,
# temperature: float = 0.9,
# max_tokens: int = 100,
# top_p: float = 1,
# choice_idx: int = 0,
# api_key: str = None,
# retry: bool = True,
# role: str = 'user',
# history: list = None,


class OpenAI(Module):
    @endpoint
    def generate(self, prompt: str, model: str = 'gpt-3.5-turbo'):
        print(f"Answering: `{prompt}` with model `{model}`")
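The generate stub above only echoes its inputs, so the endpoint can be exercised locally without an API key or a running server. A minimal check, using only the names that appear in this module:

from communex.module.example.openai import OpenAI

module = OpenAI()
module.generate("What is the Commune network?")
# prints: Answering: `What is the Commune network?` with model `gpt-3.5-turbo`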
class OpenAI(communex.module.module.Module):
@endpoint
def generate(self, prompt: str, model: str = 'gpt-3.5-turbo'):
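The commented-out parameters at the top of the module suggest the shape of a fuller signature. Below is a sketch of how they could be wired to a real completion call, assuming the openai Python package (>= 1.0) is installed and OPENAI_API_KEY is set in the environment; the OpenAIChat class name is hypothetical, and the client calls come from the openai library, not from communex:

import os

from openai import OpenAI as OpenAIClient

from communex.module import Module, endpoint


class OpenAIChat(Module):
    # Hypothetical variant of the example module that forwards the
    # commented-out parameters to the OpenAI chat completions API.
    @endpoint
    def generate(
        self,
        prompt: str = 'sup?',
        model: str = 'gpt-3.5-turbo',
        temperature: float = 0.9,
        max_tokens: int = 100,
        top_p: float = 1,
        presence_penalty: float = 0.0,
        frequency_penalty: float = 0.0,
        role: str = 'user',
        choice_idx: int = 0,
    ):
        # The API key is read from the environment instead of an api_key argument.
        client = OpenAIClient(api_key=os.environ["OPENAI_API_KEY"])
        response = client.chat.completions.create(
            model=model,
            messages=[{"role": role, "content": prompt}],
            temperature=temperature,
            max_tokens=max_tokens,
            top_p=top_p,
            presence_penalty=presence_penalty,
            frequency_penalty=frequency_penalty,
        )
        # Return the text of the selected choice.
        return response.choices[choice_idx].message.content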