# 99_helpers_test.ipynb

import importlib.util
# Fail fast with a clear signal if the course package isn't installed,
# rather than hitting a confusing ImportError two lines down.
assert importlib.util.find_spec("data401_nlp") is not None
from data401_nlp.helpers.env import load_env
from data401_nlp.helpers.llm import make_chat, LLM_MODELS


DEFAULT_MODEL = LLM_MODELS[0]  # claude by default; see helpers/01_llm.ipynb to change models.
# If your preferred model is not present, send a pull request and I'll add it.
# NOTE(review): load_env() presumably loads API credentials (e.g. from a .env file)
# and must run before make_chat — confirm in helpers/env.
load_env()
chat = make_chat(DEFAULT_MODEL)
# Smoke test: one short completion; the notebook renders the response below.
chat("Say hello in one sentence.")

Hello, it’s nice to meet you!

  • id: chatcmpl-29997df4-9364-41e9-8f9e-7b709a7ab092
  • model: claude-sonnet-4-5-20250929
  • finish_reason: stop
  • usage: Usage(completion_tokens=12, prompt_tokens=13, total_tokens=25, completion_tokens_details=None, prompt_tokens_details=PromptTokensDetailsWrapper(audio_tokens=None, cached_tokens=0, text_tokens=None, image_tokens=None, cache_creation_tokens=0, cache_creation_token_details=CacheCreationTokenDetails(ephemeral_5m_input_tokens=0, ephemeral_1h_input_tokens=0)), cache_creation_input_tokens=0, cache_read_input_tokens=0)
# Sample answers for the collect_answers call below. The q<N>_answer naming
# convention appears to be how the submit helper locates them — do not rename;
# verify against helpers/submit.
q1_answer = "A"
q2_answer = 42
q3_answer = ["token", "vector"]
from data401_nlp.helpers.submit import collect_answers

# Gather the q*_answer values from this notebook and sanity-check the count.
# NOTE(review): passing path= suggests collect_answers parses the saved .ipynb
# file rather than reading live kernel state — confirm in helpers/submit, and
# save the notebook before running this cell if so.
raw = collect_answers(path="99_helpers_test.ipynb")
assert len(raw) == 3
=== Student Responses ===

q1_answer = "A"
q2_answer = 42
q3_answer = ["token", "vector"]