fix: updated temp scale to be 0-2; some type hints
ericrallen committed Sep 24, 2023
1 parent ff0c955 commit d6819b0
Showing 3 changed files with 79 additions and 70 deletions.
6 changes: 3 additions & 3 deletions widgets/advanced.py
@@ -80,7 +80,7 @@ def betterChatGptSentiment(prompt: str) -> str:


 # convert sentiment score to label
-def convertSentimentToLabel(sentiment):
+def convertSentimentToLabel(sentiment: float) -> str:
     # map the sentiment to a human readable label
     if sentiment >= 0.75:
         return "Very Positive"

@@ -99,15 +99,15 @@ def convertSentimentToLabel(sentiment):


 # analyze the sentiment of a string of text
-def analyzeBasicSentiment(text):
+def analyzeBasicSentiment(text: str) -> dict[str, float]:
     if not text:
         return ""

     # use VADER to get the +/- sentiment of the string
     return analyzer.polarity_scores(text)


-def getNRCEmotion(text):
+def getNRCEmotion(text: str) -> list[tuple[str, float]]:
     emotion = NRCLex(text)

     return emotion.top_emotions
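The annotations added here mirror what the underlying libraries return: VADER's polarity_scores() yields a dict of float scores (negative, neutral, positive, plus a compound score in [-1, 1]), and NRCLex's top_emotions is a list of (emotion, score) tuples. A minimal standalone sketch of those return shapes, assuming analyzer is a VADER SentimentIntensityAnalyzer from the vaderSentiment package (the notebook constructs its analyzer elsewhere in this file):

from nrclex import NRCLex
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer

analyzer = SentimentIntensityAnalyzer()
text = "I absolutely love this notebook!"

# polarity_scores() returns a dict[str, float] with "neg", "neu", "pos", and "compound" keys,
# which is the shape the new analyzeBasicSentiment annotation describes
scores = analyzer.polarity_scores(text)
print(scores["compound"])  # compound sentiment, in [-1, 1]

# top_emotions is a list[tuple[str, float]] of (emotion, relative frequency) pairs,
# matching the new getNRCEmotion annotation
print(NRCLex(text).top_emotions)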
71 changes: 38 additions & 33 deletions widgets/config.py
@@ -1,57 +1,62 @@
 import ipywidgets as widgets

-openAiHeader = widgets.Label('OpenAI API', style=dict(font_size="1.2rem", font_weight="bold"))
-hackerNewsHeader = widgets.Label('Hacker News API', style=dict(font_size="1.2rem", font_weight="bold"))
+openAiHeader = widgets.Label(
+    "OpenAI API", style=dict(font_size="1.2rem", font_weight="bold")
+)
+hackerNewsHeader = widgets.Label(
+    "Hacker News API", style=dict(font_size="1.2rem", font_weight="bold")
+)

 apiKeyInput = widgets.Text(
-    value='',
-    placeholder='Enter your OpenAI API key',
-    description='API Key',
+    value="",
+    placeholder="Enter your OpenAI API key",
+    description="API Key",
 )

 apiKeyUpdateButton = widgets.Button(
-    description='Update Key',
-    disabled=False,
-    button_style='primary' if not apiKeyInput.value else 'danger',
+    description="Update Key",
+    disabled=False,
+    button_style="primary" if not apiKeyInput.value else "danger",
 )

 modelDropdown = widgets.Dropdown(
-    options=['gpt-3.5-turbo', 'gpt-4'],
-    value='gpt-3.5-turbo',
-    description='Model',
+    options=["gpt-3.5-turbo", "gpt-4"],
+    value="gpt-3.5-turbo",
+    description="Model",
 )

 temperatureSlider = widgets.FloatSlider(
-    value=0,
-    min=0,
-    max=1,
-    step=0.01,
-    description='Temp',
-    continuous_update=False,
-    orientation='horizontal',
-    readout=True,
-    readout_format='.2f',
+    value=0,
+    min=0,
+    max=2,
+    step=0.01,
+    description="Temp",
+    continuous_update=False,
+    orientation="horizontal",
+    readout=True,
+    readout_format=".2f",
 )

 sampleSizeSlider = widgets.IntSlider(
-    value=1,
-    min=1,
-    max=100,
-    step=1,
-    description='Samples',
-    continuous_update=False,
-    orientation='horizontal',
-    readout=True,
-    readout_format='d'
+    value=1,
+    min=1,
+    max=100,
+    step=1,
+    description="Samples",
+    continuous_update=False,
+    orientation="horizontal",
+    readout=True,
+    readout_format="d",
 )

 sampleSizeWarningLabelPrefix = widgets.Label(
-    value='Warning:',
-    style=dict(font_weight='bold')
+    value="Warning:", style=dict(font_weight="bold")
 )

 sampleSizeWarningLabelContent = widgets.Label(
-    value='Increasing sample size will result in higher costs.'
+    value="Increasing sample size will result in higher costs."
 )

-sampleSizeWarningLabel = widgets.HBox([sampleSizeWarningLabelPrefix, sampleSizeWarningLabelContent])
+sampleSizeWarningLabel = widgets.HBox(
+    [sampleSizeWarningLabelPrefix, sampleSizeWarningLabelContent]
+)
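The substantive change in this file is temperatureSlider's max moving from 1 to 2, which covers the full 0-2 range the OpenAI chat completions API accepts for temperature; the remaining hunks are quote-style and line-wrapping reformatting. A hedged sketch of how these widget values might feed a request, assuming the pre-1.0 openai Python client that was current in September 2023 and with the widgets defined above in scope (the actual wiring is hypothetical here and lives elsewhere in the repo):

import openai

from widgets.config import apiKeyInput, modelDropdown, sampleSizeSlider, temperatureSlider

# hypothetical glue code -- the real notebook reads these widget values elsewhere
openai.api_key = apiKeyInput.value

response = openai.ChatCompletion.create(
    model=modelDropdown.value,            # "gpt-3.5-turbo" or "gpt-4"
    temperature=temperatureSlider.value,  # now anywhere in 0-2 instead of 0-1
    n=sampleSizeSlider.value,             # more samples => higher cost, hence the warning label
    messages=[{"role": "user", "content": "Say hello."}],
)

print(response.choices[0].message.content)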
72 changes: 38 additions & 34 deletions widgets/tokens.py
@@ -2,56 +2,60 @@

 import tiktoken

-MODEL = 'gtp-3.5-turbo'
+MODEL = "gtp-3.5-turbo"

-def configureModel(model):
-    global MODEL
-    MODEL = model
-
-def tokenize(text):
-    tokens = []
-    ids = []
+def configureModel(model: str):
+    global MODEL
+    MODEL = model

-    # To get the tokeniser corresponding to a specific model in the OpenAI API:
-    encoding = tiktoken.encoding_for_model(MODEL)
-
-    tokenized = encoding.encode(text)
+def tokenize(text: str) -> (list[str], list[int]):
+    tokens = []
+    ids = []

-    for tokenId in tokenized:
-        ids.append(tokenId)
-        tokens.append(encoding.decode_single_token_bytes(tokenId).decode('utf-8'))
+    # To get the tokeniser corresponding to a specific model in the OpenAI API:
+    encoding = tiktoken.encoding_for_model(MODEL)

-    return (tokens, ids)
+    tokenized = encoding.encode(text)

-def getTokens(change):
-    (tokens, ids) = tokenize(change['new'].strip())
+    for tokenId in tokenized:
+        ids.append(tokenId)
+        tokens.append(encoding.decode_single_token_bytes(tokenId).decode("utf-8"))

-    if tokens:
-        with tokenCount:
-            tokenCount.clear_output(wait=True)
-            print(f"{len(tokens)} tokens")
+    return (tokens, ids)

-        with tokenIds:
-            tokenIds.clear_output(wait=True)
-            print(f"{ids}")
-
-        with tokenAnalysis:
-            tokenAnalysis.clear_output(wait=True)
-            print(f"{tokens}")
-    else:
-        tokenCount.clear_output()
-        tokenIds.clear_output()
-        tokenAnalysis.clear_output()
+def getTokens(change: dict[str, str]):
+    (tokens, ids) = tokenize(change["new"].strip())
+
+    if tokens:
+        with tokenCount:
+            tokenCount.clear_output(wait=True)
+            print(f"{len(tokens)} tokens")
+
+        with tokenIds:
+            tokenIds.clear_output(wait=True)
+            print(f"{ids}")
+
+        with tokenAnalysis:
+            tokenAnalysis.clear_output(wait=True)
+            print(f"{tokens}")
+    else:
+        tokenCount.clear_output()
+        tokenIds.clear_output()
+        tokenAnalysis.clear_output()


 tokenAnalysis = widgets.Output()
 tokenIds = widgets.Output()
 tokenCount = widgets.Output()

 tokenString = widgets.Text(
-    value='',
-    placeholder='Type something',
+    value="",
+    placeholder="Type something",
 )

-tokenString.observe(getTokens, names='value')
+tokenString.observe(getTokens, names="value")

-tokenAnalysisWidget = widgets.VBox([tokenString, tokenCount, tokenAnalysis, tokenIds])
+tokenAnalysisWidget = widgets.VBox([tokenString, tokenCount, tokenAnalysis, tokenIds])
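tokenize() is a thin wrapper over tiktoken: encode() produces the token ids and decode_single_token_bytes() recovers each token's text. One caveat worth knowing is that tiktoken.encoding_for_model() only recognizes real model names such as "gpt-3.5-turbo" and raises a KeyError otherwise. A standalone sketch of the same round trip using a hypothetical tokenizeWith() helper that falls back to the cl100k_base encoding:

import tiktoken

def tokenizeWith(model: str, text: str) -> tuple[list[str], list[int]]:
    # fall back to the base chat-model encoding if the model name isn't recognized
    try:
        encoding = tiktoken.encoding_for_model(model)
    except KeyError:
        encoding = tiktoken.get_encoding("cl100k_base")

    ids = encoding.encode(text)
    tokens = [encoding.decode_single_token_bytes(tokenId).decode("utf-8") for tokenId in ids]

    return tokens, ids

tokens, ids = tokenizeWith("gpt-3.5-turbo", "Hello, world!")
print(f"{len(tokens)} tokens")  # same count the tokenCount output widget prints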
