In [1]:
# I put the Google Drive mount first just to get the popup dialog box out of the way
from google.colab import drive
drive.mount('/content/gdrive')
Mounted at /content/gdrive
In [2]:
# !ls -l /content/gdrive
In [3]:
!pip install llama-cpp-python
Collecting llama-cpp-python
Downloading llama_cpp_python-0.3.3.tar.gz (64.5 MB)
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 64.5/64.5 MB 18.0 MB/s eta 0:00:00
Installing build dependencies ... done
Getting requirements to build wheel ... done
Installing backend dependencies ... done
Preparing metadata (pyproject.toml) ... done
Requirement already satisfied: typing-extensions>=4.5.0 in /usr/local/lib/python3.10/dist-packages (from llama-cpp-python) (4.12.2)
Requirement already satisfied: numpy>=1.20.0 in /usr/local/lib/python3.10/dist-packages (from llama-cpp-python) (1.26.4)
Collecting diskcache>=5.6.1 (from llama-cpp-python)
Downloading diskcache-5.6.3-py3-none-any.whl.metadata (20 kB)
Requirement already satisfied: jinja2>=2.11.3 in /usr/local/lib/python3.10/dist-packages (from llama-cpp-python) (3.1.4)
Requirement already satisfied: MarkupSafe>=2.0 in /usr/local/lib/python3.10/dist-packages (from jinja2>=2.11.3->llama-cpp-python) (3.0.2)
Downloading diskcache-5.6.3-py3-none-any.whl (45 kB)
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━ 45.5/45.5 kB 2.6 MB/s eta 0:00:00
Building wheels for collected packages: llama-cpp-python
Building wheel for llama-cpp-python (pyproject.toml) ... done
Created wheel for llama-cpp-python: filename=llama_cpp_python-0.3.3-cp310-cp310-linux_x86_64.whl size=3510152 sha256=feb052db57e8da3b893e4120c8ac64062df92a82d39f9f77fcd0f88d4aa827a0
Stored in directory: /root/.cache/pip/wheels/32/18/83/fbcc25ec6c7d60fdd0d36b481fe8262e428c3831dc5f03c7e8
Successfully built llama-cpp-python
Installing collected packages: diskcache, llama-cpp-python
Successfully installed diskcache-5.6.3 llama-cpp-python-0.3.3
In [4]:
#https://medium.com/analytics-vidhya/building-a-simple-chatbot-in-python-using-nltk-7c8c8215ac6e
import nltk
import numpy as np
import random
import string # to process standard python strings
import warnings
warnings.filterwarnings("ignore")
In [5]:
!wget -O llama-2-7b-chat.Q3_K_M.gguf "https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q3_K_M.gguf"
--2024-12-09 04:48:25-- https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGUF/resolve/main/llama-2-7b-chat.Q3_K_M.gguf Resolving huggingface.co (huggingface.co)... 65.8.243.46, 65.8.243.92, 65.8.243.90, ... Connecting to huggingface.co (huggingface.co)|65.8.243.46|:443... connected. HTTP request sent, awaiting response... 302 Found Location: https://cdn-lfs.hf.co/repos/b0/ca/b0cae82fd4b3a362cab01d17953c45edac67d1c2dfb9fbb9e69c80c32dc2012e/87e0f473c491a1535c8ee84a113cd2ea0edba59d8bf0f1c30a89b5319e77451e?response-content-disposition=inline%3B+filename*%3DUTF-8%27%27llama-2-7b-chat.Q3_K_M.gguf%3B+filename%3D%22llama-2-7b-chat.Q3_K_M.gguf%22%3B&Expires=1733978905&Policy=eyJTdGF0ZW1lbnQiOlt7IkNvbmRpdGlvbiI6eyJEYXRlTGVzc1RoYW4iOnsiQVdTOkVwb2NoVGltZSI6MTczMzk3ODkwNX19LCJSZXNvdXJjZSI6Imh0dHBzOi8vY2RuLWxmcy5oZi5jby9yZXBvcy9iMC9jYS9iMGNhZTgyZmQ0YjNhMzYyY2FiMDFkMTc5NTNjNDVlZGFjNjdkMWMyZGZiOWZiYjllNjljODBjMzJkYzIwMTJlLzg3ZTBmNDczYzQ5MWExNTM1YzhlZTg0YTExM2NkMmVhMGVkYmE1OWQ4YmYwZjFjMzBhODliNTMxOWU3NzQ1MWU%7EcmVzcG9uc2UtY29udGVudC1kaXNwb3NpdGlvbj0qIn1dfQ__&Signature=CpaO1hbKm60Ajp3N-pK7bbSqYADn4eLN5Z%7EqKlPv4y%7EQ9eKHBsxpFiuE7tLhpnuiWVVXn%7E-y%7E3-raPs6IJjy6xwi6gr%7Ei1njYlBrVOsRpUxyTBAkbaAs1rR0VQBm98f6MTUSXQaUUoRnGc6Rz6HmnQyIjyqJGgshaAhHlorNLud%7E3h1OsHNwvttGKlOpM%7Exb9kyb9ePykVJ04LzMVnyX8Dx2xKHpTlWW4OfEwUanc9l1pRju6N59-12HSemcBLyy-TRBq2N8Ne2ZyPj4UmwjXku%7EBsR4RkrPP%7E2HXk7QLaIwnm6fbAA%7EnbdckvuCuWoUY5f9Sf1EEDJBaVGqQ7v7zw__&Key-Pair-Id=K3RPWS32NSSJCE [following] --2024-12-09 04:48:25-- https://cdn-lfs.hf.co/repos/b0/ca/b0cae82fd4b3a362cab01d17953c45edac67d1c2dfb9fbb9e69c80c32dc2012e/87e0f473c491a1535c8ee84a113cd2ea0edba59d8bf0f1c30a89b5319e77451e?response-content-disposition=inline%3B+filename*%3DUTF-8%27%27llama-2-7b-chat.Q3_K_M.gguf%3B+filename%3D%22llama-2-7b-chat.Q3_K_M.gguf%22%3B&Expires=1733978905&Policy=eyJTdGF0ZW1lbnQiOlt7IkNvbmRpdGlvbiI6eyJEYXRlTGVzc1RoYW4iOnsiQVdTOkVwb2NoVGltZSI6MTczMzk3ODkwNX19LCJSZXNvdXJjZSI6Imh0dHBzOi8vY2RuLWxmcy5oZi5jby9yZXBvcy9iMC9jYS9iMGNhZTgyZmQ0YjNhMzYyY2FiMDFkMTc5NTNjNDVlZGFjNjdkMWMyZGZiOWZiYjllNjljODBjMzJkYzIwMTJlLzg3ZTBmNDczYzQ5MWExNTM1YzhlZTg0YTExM2NkMmVhMGVkYmE1OWQ4YmYwZjFjMzBhODliNTMxOWU3NzQ1MWU%7EcmVzcG9uc2UtY29udGVudC1kaXNwb3NpdGlvbj0qIn1dfQ__&Signature=CpaO1hbKm60Ajp3N-pK7bbSqYADn4eLN5Z%7EqKlPv4y%7EQ9eKHBsxpFiuE7tLhpnuiWVVXn%7E-y%7E3-raPs6IJjy6xwi6gr%7Ei1njYlBrVOsRpUxyTBAkbaAs1rR0VQBm98f6MTUSXQaUUoRnGc6Rz6HmnQyIjyqJGgshaAhHlorNLud%7E3h1OsHNwvttGKlOpM%7Exb9kyb9ePykVJ04LzMVnyX8Dx2xKHpTlWW4OfEwUanc9l1pRju6N59-12HSemcBLyy-TRBq2N8Ne2ZyPj4UmwjXku%7EBsR4RkrPP%7E2HXk7QLaIwnm6fbAA%7EnbdckvuCuWoUY5f9Sf1EEDJBaVGqQ7v7zw__&Key-Pair-Id=K3RPWS32NSSJCE Resolving cdn-lfs.hf.co (cdn-lfs.hf.co)... 18.154.101.56, 18.154.101.64, 18.154.101.58, ... Connecting to cdn-lfs.hf.co (cdn-lfs.hf.co)|18.154.101.56|:443... connected. HTTP request sent, awaiting response... 200 OK Length: 3298004672 (3.1G) [binary/octet-stream] Saving to: ‘llama-2-7b-chat.Q3_K_M.gguf’ llama-2-7b-chat.Q3_ 100%[===================>] 3.07G 52.4MB/s in 64s 2024-12-09 04:49:29 (48.8 MB/s) - ‘llama-2-7b-chat.Q3_K_M.gguf’ saved [3298004672/3298004672]
In [6]:
from llama_cpp import Llama
# Load the model
model = Llama(model_path="llama-2-7b-chat.Q3_K_M.gguf")
llama_model_loader: loaded meta data with 19 key-value pairs and 291 tensors from llama-2-7b-chat.Q3_K_M.gguf (version GGUF V2)
llama_model_loader: Dumping metadata keys/values. Note: KV overrides do not apply in this output.
llama_model_loader: - kv 0: general.architecture str = llama
llama_model_loader: - kv 1: general.name str = LLaMA v2
llama_model_loader: - kv 2: llama.context_length u32 = 4096
llama_model_loader: - kv 3: llama.embedding_length u32 = 4096
llama_model_loader: - kv 4: llama.block_count u32 = 32
llama_model_loader: - kv 5: llama.feed_forward_length u32 = 11008
llama_model_loader: - kv 6: llama.rope.dimension_count u32 = 128
llama_model_loader: - kv 7: llama.attention.head_count u32 = 32
llama_model_loader: - kv 8: llama.attention.head_count_kv u32 = 32
llama_model_loader: - kv 9: llama.attention.layer_norm_rms_epsilon f32 = 0.000001
llama_model_loader: - kv 10: general.file_type u32 = 12
llama_model_loader: - kv 11: tokenizer.ggml.model str = llama
llama_model_loader: - kv 12: tokenizer.ggml.tokens arr[str,32000] = ["<unk>", "<s>", "</s>", "<0x00>", "<...
llama_model_loader: - kv 13: tokenizer.ggml.scores arr[f32,32000] = [0.000000, 0.000000, 0.000000, 0.0000...
llama_model_loader: - kv 14: tokenizer.ggml.token_type arr[i32,32000] = [2, 3, 3, 6, 6, 6, 6, 6, 6, 6, 6, 6, ...
llama_model_loader: - kv 15: tokenizer.ggml.bos_token_id u32 = 1
llama_model_loader: - kv 16: tokenizer.ggml.eos_token_id u32 = 2
llama_model_loader: - kv 17: tokenizer.ggml.unknown_token_id u32 = 0
llama_model_loader: - kv 18: general.quantization_version u32 = 2
llama_model_loader: - type f32: 65 tensors
llama_model_loader: - type q3_K: 129 tensors
llama_model_loader: - type q4_K: 92 tensors
llama_model_loader: - type q5_K: 4 tensors
llama_model_loader: - type q6_K: 1 tensors
llm_load_vocab: control token: 2 '</s>' is not marked as EOG
llm_load_vocab: control token: 1 '<s>' is not marked as EOG
llm_load_vocab: special_eos_id is not in special_eog_ids - the tokenizer config may be incorrect
llm_load_vocab: special tokens cache size = 3
llm_load_vocab: token to piece cache size = 0.1684 MB
llm_load_print_meta: format = GGUF V2
llm_load_print_meta: arch = llama
llm_load_print_meta: vocab type = SPM
llm_load_print_meta: n_vocab = 32000
llm_load_print_meta: n_merges = 0
llm_load_print_meta: vocab_only = 0
llm_load_print_meta: n_ctx_train = 4096
llm_load_print_meta: n_embd = 4096
llm_load_print_meta: n_layer = 32
llm_load_print_meta: n_head = 32
llm_load_print_meta: n_head_kv = 32
llm_load_print_meta: n_rot = 128
llm_load_print_meta: n_swa = 0
llm_load_print_meta: n_embd_head_k = 128
llm_load_print_meta: n_embd_head_v = 128
llm_load_print_meta: n_gqa = 1
llm_load_print_meta: n_embd_k_gqa = 4096
llm_load_print_meta: n_embd_v_gqa = 4096
llm_load_print_meta: f_norm_eps = 0.0e+00
llm_load_print_meta: f_norm_rms_eps = 1.0e-06
llm_load_print_meta: f_clamp_kqv = 0.0e+00
llm_load_print_meta: f_max_alibi_bias = 0.0e+00
llm_load_print_meta: f_logit_scale = 0.0e+00
llm_load_print_meta: n_ff = 11008
llm_load_print_meta: n_expert = 0
llm_load_print_meta: n_expert_used = 0
llm_load_print_meta: causal attn = 1
llm_load_print_meta: pooling type = 0
llm_load_print_meta: rope type = 0
llm_load_print_meta: rope scaling = linear
llm_load_print_meta: freq_base_train = 10000.0
llm_load_print_meta: freq_scale_train = 1
llm_load_print_meta: n_ctx_orig_yarn = 4096
llm_load_print_meta: rope_finetuned = unknown
llm_load_print_meta: ssm_d_conv = 0
llm_load_print_meta: ssm_d_inner = 0
llm_load_print_meta: ssm_d_state = 0
llm_load_print_meta: ssm_dt_rank = 0
llm_load_print_meta: ssm_dt_b_c_rms = 0
llm_load_print_meta: model type = 7B
llm_load_print_meta: model ftype = Q3_K - Medium
llm_load_print_meta: model params = 6.74 B
llm_load_print_meta: model size = 3.07 GiB (3.91 BPW)
llm_load_print_meta: general.name = LLaMA v2
llm_load_print_meta: BOS token = 1 '<s>'
llm_load_print_meta: EOS token = 2 '</s>'
llm_load_print_meta: UNK token = 0 '<unk>'
llm_load_print_meta: LF token = 13 '<0x0A>'
llm_load_print_meta: EOG token = 2 '</s>'
llm_load_print_meta: max token length = 48
llm_load_tensors: tensor 'token_embd.weight' (q3_K) (and 290 others) cannot be used with preferred buffer type CPU_AARCH64, using CPU instead
llm_load_tensors: CPU_Mapped model buffer size = 3144.52 MiB
..................................................................................................
llama_new_context_with_model: n_seq_max = 1
llama_new_context_with_model: n_ctx = 512
llama_new_context_with_model: n_ctx_per_seq = 512
llama_new_context_with_model: n_batch = 512
llama_new_context_with_model: n_ubatch = 512
llama_new_context_with_model: flash_attn = 0
llama_new_context_with_model: freq_base = 10000.0
llama_new_context_with_model: freq_scale = 1
llama_new_context_with_model: n_ctx_per_seq (512) < n_ctx_train (4096) -- the full capacity of the model will not be utilized
llama_kv_cache_init: CPU KV buffer size = 256.00 MiB
llama_new_context_with_model: KV self size = 256.00 MiB, K (f16): 128.00 MiB, V (f16): 128.00 MiB
llama_new_context_with_model: CPU output buffer size = 0.12 MiB
llama_new_context_with_model: CPU compute buffer size = 70.50 MiB
llama_new_context_with_model: graph nodes = 1030
llama_new_context_with_model: graph splits = 1
CPU : SSE3 = 1 | SSSE3 = 1 | AVX = 1 | AVX2 = 1 | F16C = 1 | FMA = 1 | AVX512 = 1 | LLAMAFILE = 1 | OPENMP = 1 | AARCH64_REPACK = 1 |
Model metadata: {'tokenizer.ggml.unknown_token_id': '0', 'tokenizer.ggml.eos_token_id': '2', 'general.architecture': 'llama', 'llama.context_length': '4096', 'general.name': 'LLaMA v2', 'llama.embedding_length': '4096', 'llama.feed_forward_length': '11008', 'llama.attention.layer_norm_rms_epsilon': '0.000001', 'llama.rope.dimension_count': '128', 'llama.attention.head_count': '32', 'tokenizer.ggml.bos_token_id': '1', 'llama.block_count': '32', 'llama.attention.head_count_kv': '32', 'general.quantization_version': '2', 'tokenizer.ggml.model': 'llama', 'general.file_type': '12'}
Using fallback chat format: llama-2
In [7]:
nltk.download('punkt_tab')
[nltk_data] Downloading package punkt_tab to /root/nltk_data... [nltk_data] Unzipping tokenizers/punkt_tab.zip.
Out[7]:
True
In [10]:
import pandas as pd
# Read the CSV file into a pandas dataframe
df = pd.read_csv('/content/gdrive/MyDrive/BGG_Final_Dataset.csv') # Google Drive
# Download NLTK data files (only needed for first-time use)
nltk.download('punkt') # Tokenizer
nltk.download('wordnet') # WordNet Lemmatizer
nltk.download('stopwords') # Stopwords
# Extract the 'Summary' column into a list
# Replace 'Summary' with the appropriate column name from your dataset if different
from_csv = df['Summary'].tolist()
# Clean the text by replacing newlines with spaces
for i in range(len(from_csv)):
from_csv[i] = from_csv[i].replace("\n", " ")
# Combine all text into a single string
all_text = " ".join(from_csv)
raw = all_text.lower()
# Tokenize the text into sentences and words
page_tokens = from_csv # Tokens for the entire paragraphs or pages
word_tokens = nltk.word_tokenize(raw) # Converts to a list of words
[nltk_data] Downloading package punkt to /root/nltk_data... [nltk_data] Package punkt is already up-to-date! [nltk_data] Downloading package wordnet to /root/nltk_data... [nltk_data] Package wordnet is already up-to-date! [nltk_data] Downloading package stopwords to /root/nltk_data... [nltk_data] Package stopwords is already up-to-date!
In [11]:
# https://stackoverflow.com/questions/17390326/getting-rid-of-stop-words-and-document-tokenization-using-nltk
from nltk.corpus import stopwords
from nltk import word_tokenize
lemmer = nltk.stem.WordNetLemmatizer()
#WordNet is a semantically-oriented dictionary of English included in NLTK.
def LemTokens(tokens):
return [lemmer.lemmatize(token) for token in tokens]
remove_punct_dict = dict((ord(punct), None) for punct in string.punctuation)
def LemNormalize(text):
stop = set(stopwords.words('english') + list(string.punctuation))
text2= ' '.join([i for i in word_tokenize(text.lower()) if i not in stop])
return LemTokens(nltk.word_tokenize(text2.lower().translate(remove_punct_dict)))
In [12]:
GREETING_INPUTS = ("hello", "hi", "greetings", "sup", "what's up","hey",)
GREETING_RESPONSES = ["hi", "hey", "*nods*", "hi there", "hello", "I am glad! You are talking to me"]
def greeting(sentence):
for word in sentence.split():
if word.lower() in GREETING_INPUTS:
return random.choice(GREETING_RESPONSES)
In [13]:
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
In [14]:
# https://stackoverflow.com/questions/17390326/getting-rid-of-stop-words-and-document-tokenization-using-nltk
def response(user_response):
robo_response=''
# TfidfVec = TfidfVectorizer(tokenizer=LemNormalize, stop_words='english')
TfidfVec = TfidfVectorizer(tokenizer=LemNormalize)
tfidf = TfidfVec.fit_transform(page_tokens)
vals = cosine_similarity(tfidf[-1], tfidf)
idx=vals.argsort()[0][-2]
flat = vals.flatten()
flat.sort()
req_tfidf = flat[-2]
if(req_tfidf==0):
robo_response=robo_response+"I am sorry! I don't understand you"
return robo_response
else:
# found page, now search for a sentence on the page
TfidfVec2 = TfidfVectorizer(tokenizer=LemNormalize)
#print(page_tokens[idx])
specific_sent_tokens2 = nltk.sent_tokenize(page_tokens[idx])
specific_sent_tokens2.append(user_response)
#word_tokens=word_tokens+nltk.word_tokenize(user_response)
tfidf2 = TfidfVec2.fit_transform(specific_sent_tokens2)
vals2 = cosine_similarity(tfidf2[-1], tfidf2)
idx2=vals2.argsort()[0][-2]
flat2 = vals2.flatten()
flat2.sort()
req_tfidf2 = flat2[-2]
if(req_tfidf2==0):
robo_response=robo_response+"I am sorry! I don't understand you"
return robo_response
# Perform inference
input= "PROMPT: Answer the QUESTION from the CONTEXT. CONTEXT: "+ specific_sent_tokens2[idx2] + " QUESTION: "+ user_response
print ("DEBUGGING: Input: "+ input+"\n")
output = model(input, max_tokens=1000, temperature=0.7, top_k=50, top_p=0.9)
# Extract the text from the 'choices' list
response_text = output['choices'][0]['text']
# Print the extracted text
#print(response_text)
#robo_response = robo_response+specific_sent_tokens2[idx2]
robo_response = robo_response+response_text
return robo_response
In [ ]:
flag=True
print("\nBOLO: My name is Bolo, the Board Game Master. I will answer your queries. If you want to exit, type Bye")
while(flag==True):
user_response = input()
user_response=user_response.lower()
if(user_response!='bye'):
if(user_response=='thanks' or user_response=='thank you' ):
flag=False
print("\nBOLO: You are welcome..")
else:
if(greeting(user_response)!=None):
print("\nBOLO: "+greeting(user_response))
else:
page_tokens.append(user_response)
word_tokens=word_tokens+nltk.word_tokenize(user_response)
final_words=list(set(word_tokens))
print()
print("\nBOLO: ",end="")
print(response(user_response))
page_tokens.remove(user_response)
print()
else:
flag=False
print("\nBOLO: Bye! take care..")
BOLO: My name is Bolo, the Board Game Master. I will answer your queries. If you want to exit, type Bye Can you recommend me a highly rated game for four players that require engine building? BOLO: DEBUGGING: Input: PROMPT: Answer the QUESTION from the CONTEXT. CONTEXT: Now four empires rise to dominance. QUESTION: can you recommend me a highly rated game for four players that require engine building?
Llama.generate: 19 prefix-match hit, remaining 32 prompt tokens to eval llama_perf_context_print: load time = 1130.71 ms llama_perf_context_print: prompt eval time = 0.00 ms / 32 tokens ( 0.00 ms per token, inf tokens per second) llama_perf_context_print: eval time = 0.00 ms / 279 runs ( 0.00 ms per token, inf tokens per second) llama_perf_context_print: total time = 28668.02 ms / 311 tokens
ANSWER: Certainly! Based on the context you provided, I would highly recommend the game "Terraforming Mars" for four players that require engine building. Terraforming Mars is a popular and highly rated game that challenges players to build and manage their own interplanetary corporation, with the ultimate goal of terraforming the red planet Mars into a habitable environment for human life. The game is designed for 1-4 players, and each player must build and manage their own engine to score victory points. The gameplay involves designing and constructing various systems on Mars, such as atmosphere processors, greenhouse complexes, and transportation networks, while managing resources and budget constraints. Players must also manage their engine by upgrading and maintaining it to ensure maximum efficiency and productivity. Terraforming Mars has received widespread critical acclaim for its immersive gameplay, beautiful components, and engaging theme. It has been praised for its high level of replayability, as the game's outcome can vary significantly depending on the player's choices and strategies. In conclusion, if you're looking for a highly rated game that requires engine building for four players, Terraforming Mars is an excellent choice. I need a game for children that is less than 30 minutes. BOLO: DEBUGGING: Input: PROMPT: Answer the QUESTION from the CONTEXT. CONTEXT: Summary: Factions struggle for control of the planet Dune -- playable in less than 30 minutes!. QUESTION: i need a game for children that is less than 30 minutes.
Llama.generate: 19 prefix-match hit, remaining 44 prompt tokens to eval llama_perf_context_print: load time = 1130.71 ms llama_perf_context_print: prompt eval time = 0.00 ms / 44 tokens ( 0.00 ms per token, inf tokens per second) llama_perf_context_print: eval time = 0.00 ms / 369 runs ( 0.00 ms per token, inf tokens per second) llama_perf_context_print: total time = 38008.21 ms / 413 tokens
What kind of game would be best for this? ANSWER: For a game that is less than 30 minutes long and suitable for children, you could consider a simple cooperative game. These types of games typically require players to work together to achieve a common goal, rather than competing against each other. Some examples of cooperative games that could be suitable for children include: 1. Forbidden Island: In this game, players work together to retrieve four treasures from an island that is sinking. The game is simple to learn and can be completed in around 20-30 minutes. 2. Forbidden Desert: Similar to Forbidden Island, this game takes place in a desert where players must work together to survive and retrieve treasures before the sandstorm ends. 3. Ticket to Ride: In this game, players compete to build train routes across a map of North America, but with a cooperative twist. Players must work together to complete their routes and score points. 4. Sushi Go!: This fast-paced card game requires players to collect sets of sushi cards to earn points. The game is easy to learn and can be completed in around 20-30 minutes. 5. Qwirkle: In this abstract strategy game, players must create lines of matching colors and shapes to score points. The game is simple to learn and can be completed in around 20-30 minutes. These types of games are designed to be quick and easy to learn, while still providing a sense of accomplishment and fun for children. They also promote teamwork and cooperation, which can be beneficial for developing social skills and relationships.
In [ ]:
# What is the employee reduced tuition benefit?
# What is reduced tuition benefit for my spouse for undergraduate classes?
# What PPOs are available?
# When am I eligible for benefits?
# Suppose I am a young adult starting my career at Northwestern, which PPO should I choose and why?
# ^-- I'm surprised that the chatbot gave me no response the first time I typed this question in,
# but it did respond the second time I typed in the question.
In [ ]:
!pip freeze
absl-py==1.4.0 accelerate==1.1.1 aiohappyeyeballs==2.4.3 aiohttp==3.10.10 aiosignal==1.3.1 alabaster==1.0.0 albucore==0.0.19 albumentations==1.4.20 altair==4.2.2 annotated-types==0.7.0 anyio==3.7.1 argon2-cffi==23.1.0 argon2-cffi-bindings==21.2.0 array_record==0.5.1 arviz==0.20.0 astropy==6.1.5 astropy-iers-data==0.2024.11.4.0.33.34 astunparse==1.6.3 async-timeout==4.0.3 atpublic==4.1.0 attrs==24.2.0 audioread==3.0.1 autograd==1.7.0 babel==2.16.0 backcall==0.2.0 beautifulsoup4==4.12.3 bigframes==1.25.0 bigquery-magics==0.4.0 bleach==6.2.0 blinker==1.9.0 blis==0.7.11 blosc2==2.7.1 bokeh==3.6.1 Bottleneck==1.4.2 bqplot==0.12.43 branca==0.8.0 CacheControl==0.14.1 cachetools==5.5.0 catalogue==2.0.10 certifi==2024.8.30 cffi==1.17.1 chardet==5.2.0 charset-normalizer==3.4.0 chex==0.1.87 clarabel==0.9.0 click==8.1.7 cloudpathlib==0.20.0 cloudpickle==3.1.0 cmake==3.30.5 cmdstanpy==1.2.4 colorcet==3.1.0 colorlover==0.3.0 colour==0.1.5 community==1.0.0b1 confection==0.1.5 cons==0.4.6 contourpy==1.3.0 cryptography==43.0.3 cuda-python==12.2.1 cudf-cu12 @ https://pypi.nvidia.com/cudf-cu12/cudf_cu12-24.10.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl cufflinks==0.17.3 cupy-cuda12x==12.2.0 cvxopt==1.3.2 cvxpy==1.5.3 cycler==0.12.1 cymem==2.0.8 Cython==3.0.11 dask==2024.10.0 datascience==0.17.6 db-dtypes==1.3.0 dbus-python==1.2.18 debugpy==1.8.0 decorator==4.4.2 defusedxml==0.7.1 Deprecated==1.2.14 diffusers==0.31.0 diskcache==5.6.3 distro==1.9.0 dlib==19.24.2 dm-tree==0.1.8 docker-pycreds==0.4.0 docstring_parser==0.16 docutils==0.21.2 dopamine_rl==4.0.9 duckdb==1.1.3 earthengine-api==1.2.0 easydict==1.13 ecos==2.0.14 editdistance==0.8.1 eerepr==0.0.4 einops==0.8.0 en-core-web-sm @ https://github.com/explosion/spacy-models/releases/download/en_core_web_sm-3.7.1/en_core_web_sm-3.7.1-py3-none-any.whl#sha256=86cc141f63942d4b2c5fcee06630fd6f904788d2f0ab005cce45aadb8fb73889 entrypoints==0.4 et_xmlfile==2.0.0 etils==1.10.0 etuples==0.3.9 eval_type_backport==0.2.0 exceptiongroup==1.2.2 fastai==2.7.18 fastcore==1.7.19 fastdownload==0.0.7 fastjsonschema==2.20.0 fastprogress==1.0.3 fastrlock==0.8.2 filelock==3.16.1 firebase-admin==6.5.0 Flask==3.0.3 flatbuffers==24.3.25 flax==0.8.5 folium==0.18.0 fonttools==4.54.1 frozendict==2.4.6 frozenlist==1.5.0 fsspec==2024.10.0 future==1.0.0 gast==0.6.0 gcsfs==2024.10.0 GDAL==3.6.4 gdown==5.2.0 geemap==0.35.1 gensim==4.3.3 geocoder==1.38.1 geographiclib==2.0 geopandas==1.0.1 geopy==2.4.1 gin-config==0.5.0 gitdb==4.0.11 GitPython==3.1.43 glob2==0.7 google==2.0.3 google-ai-generativelanguage==0.6.10 google-api-core==2.19.2 google-api-python-client==2.151.0 google-auth==2.27.0 google-auth-httplib2==0.2.0 google-auth-oauthlib==1.2.1 google-cloud-aiplatform==1.71.1 google-cloud-bigquery==3.25.0 google-cloud-bigquery-connection==1.16.1 google-cloud-bigquery-storage==2.27.0 google-cloud-bigtable==2.26.0 google-cloud-core==2.4.1 google-cloud-datastore==2.20.1 google-cloud-firestore==2.19.0 google-cloud-functions==1.18.0 google-cloud-iam==2.16.0 google-cloud-language==2.15.0 google-cloud-pubsub==2.26.1 google-cloud-resource-manager==1.13.0 google-cloud-storage==2.8.0 google-cloud-translate==3.17.0 google-colab @ file:///colabtools/dist/google_colab-1.0.0.tar.gz google-crc32c==1.6.0 google-generativeai==0.8.3 google-pasta==0.2.0 google-resumable-media==2.7.2 googleapis-common-protos==1.65.0 googledrivedownloader==0.4 graphviz==0.20.3 greenlet==3.1.1 grpc-google-iam-v1==0.13.1 grpcio==1.67.1 grpcio-status==1.62.3 gspread==6.0.2 gspread-dataframe==3.3.1 gym==0.25.2 gym-notices==0.0.8 h11==0.14.0 h5netcdf==1.4.0 h5py==3.12.1 holidays==0.60 holoviews==1.20.0 html5lib==1.1 httpcore==1.0.6 httpimport==1.4.0 httplib2==0.22.0 httpx==0.27.2 huggingface-hub==0.26.2 humanize==4.11.0 hyperopt==0.2.7 ibis-framework==9.2.0 idna==3.10 imageio==2.36.0 imageio-ffmpeg==0.5.1 imagesize==1.4.1 imbalanced-learn==0.12.4 imgaug==0.4.0 immutabledict==4.2.0 importlib_metadata==8.5.0 importlib_resources==6.4.5 imutils==0.5.4 inflect==7.4.0 iniconfig==2.0.0 intel-cmplr-lib-ur==2025.0.0 intel-openmp==2025.0.0 ipyevents==2.0.2 ipyfilechooser==0.6.0 ipykernel==5.5.6 ipyleaflet==0.19.2 ipyparallel==8.8.0 ipython==7.34.0 ipython-genutils==0.2.0 ipython-sql==0.5.0 ipytree==0.2.2 ipywidgets==7.7.1 itsdangerous==2.2.0 jax==0.4.33 jax-cuda12-pjrt==0.4.33 jax-cuda12-plugin==0.4.33 jaxlib==0.4.33 jeepney==0.7.1 jellyfish==1.1.0 jieba==0.42.1 Jinja2==3.1.4 jiter==0.7.0 joblib==1.4.2 jsonpatch==1.33 jsonpickle==3.4.2 jsonpointer==3.0.0 jsonschema==4.23.0 jsonschema-specifications==2024.10.1 jupyter-client==6.1.12 jupyter-console==6.1.0 jupyter-leaflet==0.19.2 jupyter-server==1.24.0 jupyter_core==5.7.2 jupyterlab_pygments==0.3.0 jupyterlab_widgets==3.0.13 kaggle==1.6.17 kagglehub==0.3.3 keras==3.4.1 keyring==23.5.0 kiwisolver==1.4.7 langchain==0.3.7 langchain-core==0.3.15 langchain-text-splitters==0.3.2 langcodes==3.4.1 langsmith==0.1.142 language_data==1.2.0 launchpadlib==1.10.16 lazr.restfulclient==0.14.4 lazr.uri==1.0.6 lazy_loader==0.4 libclang==18.1.1 libcudf-cu12 @ https://pypi.nvidia.com/libcudf-cu12/libcudf_cu12-24.10.1-py3-none-manylinux_2_28_x86_64.whl librosa==0.10.2.post1 lightgbm==4.5.0 linkify-it-py==2.0.3 llama_cpp_python==0.3.1 llvmlite==0.43.0 locket==1.0.0 logical-unification==0.4.6 lxml==5.3.0 marisa-trie==1.2.1 Markdown==3.7 markdown-it-py==3.0.0 MarkupSafe==3.0.2 matplotlib==3.8.0 matplotlib-inline==0.1.7 matplotlib-venn==1.1.1 mdit-py-plugins==0.4.2 mdurl==0.1.2 miniKanren==1.0.3 missingno==0.5.2 mistune==3.0.2 mizani==0.13.0 mkl==2025.0.0 ml-dtypes==0.4.1 mlxtend==0.23.2 more-itertools==10.5.0 moviepy==1.0.3 mpmath==1.3.0 msgpack==1.1.0 multidict==6.1.0 multipledispatch==1.0.0 multitasking==0.0.11 murmurhash==1.0.10 music21==9.3.0 namex==0.0.8 natsort==8.4.0 nbclassic==1.1.0 nbclient==0.10.0 nbconvert==7.16.4 nbformat==5.10.4 ndindex==1.9.2 nest-asyncio==1.6.0 networkx==3.4.2 nibabel==5.3.2 nltk==3.9.1 notebook==6.5.5 notebook_shim==0.2.4 numba==0.60.0 numexpr==2.10.1 numpy==1.26.4 nvidia-cublas-cu12==12.6.3.3 nvidia-cuda-cupti-cu12==12.6.80 nvidia-cuda-nvcc-cu12==12.6.77 nvidia-cuda-runtime-cu12==12.6.77 nvidia-cudnn-cu12==9.5.1.17 nvidia-cufft-cu12==11.3.0.4 nvidia-curand-cu12==10.3.7.77 nvidia-cusolver-cu12==11.7.1.2 nvidia-cusparse-cu12==12.5.4.2 nvidia-nccl-cu12==2.23.4 nvidia-nvjitlink-cu12==12.6.77 nvtx==0.2.10 nx-cugraph-cu12 @ https://pypi.nvidia.com/nx-cugraph-cu12/nx_cugraph_cu12-24.10.0-py3-none-any.whl oauth2client==4.1.3 oauthlib==3.2.2 openai==1.54.3 opencv-contrib-python==4.10.0.84 opencv-python==4.10.0.84 opencv-python-headless==4.10.0.84 openpyxl==3.1.5 opentelemetry-api==1.28.0 opentelemetry-sdk==1.28.0 opentelemetry-semantic-conventions==0.49b0 opt_einsum==3.4.0 optax==0.2.3 optree==0.13.0 orbax-checkpoint==0.6.4 orjson==3.10.11 osqp==0.6.7.post3 packaging==24.2 pandas==2.2.2 pandas-datareader==0.10.0 pandas-gbq==0.24.0 pandas-stubs==2.2.2.240909 pandocfilters==1.5.1 panel==1.5.3 param==2.1.1 parso==0.8.4 parsy==2.1 partd==1.4.2 pathlib==1.0.1 patsy==0.5.6 peewee==3.17.7 peft==0.13.2 pexpect==4.9.0 pickleshare==0.7.5 pillow==11.0.0 platformdirs==4.3.6 plotly==5.24.1 plotnine==0.14.1 pluggy==1.5.0 polars==1.9.0 pooch==1.8.2 portpicker==1.5.2 preshed==3.0.9 prettytable==3.12.0 proglog==0.1.10 progressbar2==4.5.0 prometheus_client==0.21.0 promise==2.3 prompt_toolkit==3.0.48 propcache==0.2.0 prophet==1.1.6 proto-plus==1.25.0 protobuf==4.25.5 psutil==5.9.5 psycopg2==2.9.10 ptyprocess==0.7.0 py-cpuinfo==9.0.0 py4j==0.10.9.7 pyarrow==17.0.0 pyarrow-hotfix==0.6 pyasn1==0.6.1 pyasn1_modules==0.4.1 pycocotools==2.0.8 pycparser==2.22 pydantic==2.9.2 pydantic_core==2.23.4 pydata-google-auth==1.8.2 pydot==3.0.2 pydotplus==2.0.2 PyDrive==1.3.1 PyDrive2==1.21.1 pyerfa==2.0.1.4 pygame==2.6.1 pygit2==1.16.0 Pygments==2.18.0 PyGObject==3.42.1 PyJWT==2.9.0 pylibcudf-cu12 @ https://pypi.nvidia.com/pylibcudf-cu12/pylibcudf_cu12-24.10.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl pylibcugraph-cu12==24.10.0 pylibraft-cu12==24.10.0 pymc==5.18.0 pymystem3==0.2.0 pynvjitlink-cu12==0.4.0 pyogrio==0.10.0 PyOpenGL==3.1.7 pyOpenSSL==24.2.1 pyparsing==3.2.0 pyperclip==1.9.0 pyproj==3.7.0 pyshp==2.3.1 PySocks==1.7.1 pyspark==3.5.3 pytensor==2.25.5 pytest==8.3.3 python-apt==0.0.0 python-box==7.2.0 python-dateutil==2.8.2 python-louvain==0.16 python-slugify==8.0.4 python-utils==3.9.0 pytz==2024.2 pyviz_comms==3.0.3 PyYAML==6.0.2 pyzmq==24.0.1 qdldl==0.1.7.post4 ratelim==0.1.6 referencing==0.35.1 regex==2024.9.11 requests==2.32.3 requests-oauthlib==1.3.1 requests-toolbelt==1.0.0 requirements-parser==0.9.0 rich==13.9.4 rmm-cu12==24.10.0 rpds-py==0.21.0 rpy2==3.4.2 rsa==4.9 safetensors==0.4.5 scikit-image==0.24.0 scikit-learn==1.5.2 scipy==1.13.1 scooby==0.10.0 scs==3.2.7 seaborn==0.13.2 SecretStorage==3.3.1 Send2Trash==1.8.3 sentence-transformers==3.2.1 sentencepiece==0.2.0 sentry-sdk==2.18.0 setproctitle==1.3.3 shap==0.46.0 shapely==2.0.6 shellingham==1.5.4 simple-parsing==0.1.6 six==1.16.0 sklearn-pandas==2.2.0 slicer==0.0.8 smart-open==7.0.5 smmap==5.0.1 sniffio==1.3.1 snowballstemmer==2.2.0 soundfile==0.12.1 soupsieve==2.6 soxr==0.5.0.post1 spacy==3.7.5 spacy-legacy==3.0.12 spacy-loggers==1.0.5 Sphinx==8.1.3 sphinxcontrib-applehelp==2.0.0 sphinxcontrib-devhelp==2.0.0 sphinxcontrib-htmlhelp==2.1.0 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==2.0.0 sphinxcontrib-serializinghtml==2.0.0 SQLAlchemy==2.0.36 sqlglot==25.1.0 sqlparse==0.5.1 srsly==2.4.8 stanio==0.5.1 statsmodels==0.14.4 StrEnum==0.4.15 stringzilla==3.10.8 sympy==1.13.1 tables==3.10.1 tabulate==0.9.0 tbb==2022.0.0 tcmlib==1.2.0 tenacity==9.0.0 tensorboard==2.17.0 tensorboard-data-server==0.7.2 tensorflow==2.17.0 tensorflow-datasets==4.9.7 tensorflow-hub==0.16.1 tensorflow-io-gcs-filesystem==0.37.1 tensorflow-metadata==1.13.1 tensorflow-probability==0.24.0 tensorstore==0.1.67 termcolor==2.5.0 terminado==0.18.1 text-unidecode==1.3 textblob==0.17.1 tf-slim==1.1.0 tf_keras==2.17.0 thinc==8.2.5 threadpoolctl==3.5.0 tifffile==2024.9.20 timm==1.0.11 tinycss2==1.4.0 tokenizers==0.20.3 toml==0.10.2 tomli==2.0.2 toolz==0.12.1 torch @ https://download.pytorch.org/whl/cu121_full/torch-2.5.0%2Bcu121-cp310-cp310-linux_x86_64.whl torchaudio @ https://download.pytorch.org/whl/cu121_full/torchaudio-2.5.0%2Bcu121-cp310-cp310-linux_x86_64.whl torchsummary==1.5.1 torchvision @ https://download.pytorch.org/whl/cu121_full/torchvision-0.20.0%2Bcu121-cp310-cp310-linux_x86_64.whl tornado==6.3.3 tqdm==4.66.6 traitlets==5.7.1 traittypes==0.2.1 transformers==4.46.2 tweepy==4.14.0 typeguard==4.4.1 typer==0.13.0 types-pytz==2024.2.0.20241003 types-setuptools==75.3.0.20241112 typing_extensions==4.12.2 tzdata==2024.2 tzlocal==5.2 uc-micro-py==1.0.3 umf==0.9.0 uritemplate==4.1.1 urllib3==2.2.3 vega-datasets==0.9.0 wadllib==1.3.6 wandb==0.18.6 wasabi==1.1.3 wcwidth==0.2.13 weasel==0.4.1 webcolors==24.8.0 webencodings==0.5.1 websocket-client==1.8.0 Werkzeug==3.1.3 widgetsnbextension==3.6.10 wordcloud==1.9.3 wrapt==1.16.0 xarray==2024.10.0 xarray-einstats==0.8.0 xgboost==2.1.2 xlrd==2.0.1 xyzservices==2024.9.0 yarl==1.17.1 yellowbrick==1.5 yfinance==0.2.48 zipp==3.20.2
In [ ]:
!pip list
Package Version ---------------------------------- ------------------- absl-py 1.4.0 accelerate 1.1.1 aiohappyeyeballs 2.4.3 aiohttp 3.10.10 aiosignal 1.3.1 alabaster 1.0.0 albucore 0.0.19 albumentations 1.4.20 altair 4.2.2 annotated-types 0.7.0 anyio 3.7.1 argon2-cffi 23.1.0 argon2-cffi-bindings 21.2.0 array_record 0.5.1 arviz 0.20.0 astropy 6.1.5 astropy-iers-data 0.2024.11.4.0.33.34 astunparse 1.6.3 async-timeout 4.0.3 atpublic 4.1.0 attrs 24.2.0 audioread 3.0.1 autograd 1.7.0 babel 2.16.0 backcall 0.2.0 beautifulsoup4 4.12.3 bigframes 1.25.0 bigquery-magics 0.4.0 bleach 6.2.0 blinker 1.9.0 blis 0.7.11 blosc2 2.7.1 bokeh 3.6.1 Bottleneck 1.4.2 bqplot 0.12.43 branca 0.8.0 CacheControl 0.14.1 cachetools 5.5.0 catalogue 2.0.10 certifi 2024.8.30 cffi 1.17.1 chardet 5.2.0 charset-normalizer 3.4.0 chex 0.1.87 clarabel 0.9.0 click 8.1.7 cloudpathlib 0.20.0 cloudpickle 3.1.0 cmake 3.30.5 cmdstanpy 1.2.4 colorcet 3.1.0 colorlover 0.3.0 colour 0.1.5 community 1.0.0b1 confection 0.1.5 cons 0.4.6 contourpy 1.3.0 cryptography 43.0.3 cuda-python 12.2.1 cudf-cu12 24.10.1 cufflinks 0.17.3 cupy-cuda12x 12.2.0 cvxopt 1.3.2 cvxpy 1.5.3 cycler 0.12.1 cymem 2.0.8 Cython 3.0.11 dask 2024.10.0 datascience 0.17.6 db-dtypes 1.3.0 dbus-python 1.2.18 debugpy 1.8.0 decorator 4.4.2 defusedxml 0.7.1 Deprecated 1.2.14 diffusers 0.31.0 diskcache 5.6.3 distro 1.9.0 dlib 19.24.2 dm-tree 0.1.8 docker-pycreds 0.4.0 docstring_parser 0.16 docutils 0.21.2 dopamine_rl 4.0.9 duckdb 1.1.3 earthengine-api 1.2.0 easydict 1.13 ecos 2.0.14 editdistance 0.8.1 eerepr 0.0.4 einops 0.8.0 en-core-web-sm 3.7.1 entrypoints 0.4 et_xmlfile 2.0.0 etils 1.10.0 etuples 0.3.9 eval_type_backport 0.2.0 exceptiongroup 1.2.2 fastai 2.7.18 fastcore 1.7.19 fastdownload 0.0.7 fastjsonschema 2.20.0 fastprogress 1.0.3 fastrlock 0.8.2 filelock 3.16.1 firebase-admin 6.5.0 Flask 3.0.3 flatbuffers 24.3.25 flax 0.8.5 folium 0.18.0 fonttools 4.54.1 frozendict 2.4.6 frozenlist 1.5.0 fsspec 2024.10.0 future 1.0.0 gast 0.6.0 gcsfs 2024.10.0 GDAL 3.6.4 gdown 5.2.0 geemap 0.35.1 gensim 4.3.3 geocoder 1.38.1 geographiclib 2.0 geopandas 1.0.1 geopy 2.4.1 gin-config 0.5.0 gitdb 4.0.11 GitPython 3.1.43 glob2 0.7 google 2.0.3 google-ai-generativelanguage 0.6.10 google-api-core 2.19.2 google-api-python-client 2.151.0 google-auth 2.27.0 google-auth-httplib2 0.2.0 google-auth-oauthlib 1.2.1 google-cloud-aiplatform 1.71.1 google-cloud-bigquery 3.25.0 google-cloud-bigquery-connection 1.16.1 google-cloud-bigquery-storage 2.27.0 google-cloud-bigtable 2.26.0 google-cloud-core 2.4.1 google-cloud-datastore 2.20.1 google-cloud-firestore 2.19.0 google-cloud-functions 1.18.0 google-cloud-iam 2.16.0 google-cloud-language 2.15.0 google-cloud-pubsub 2.26.1 google-cloud-resource-manager 1.13.0 google-cloud-storage 2.8.0 google-cloud-translate 3.17.0 google-colab 1.0.0 google-crc32c 1.6.0 google-generativeai 0.8.3 google-pasta 0.2.0 google-resumable-media 2.7.2 googleapis-common-protos 1.65.0 googledrivedownloader 0.4 graphviz 0.20.3 greenlet 3.1.1 grpc-google-iam-v1 0.13.1 grpcio 1.67.1 grpcio-status 1.62.3 gspread 6.0.2 gspread-dataframe 3.3.1 gym 0.25.2 gym-notices 0.0.8 h11 0.14.0 h5netcdf 1.4.0 h5py 3.12.1 holidays 0.60 holoviews 1.20.0 html5lib 1.1 httpcore 1.0.6 httpimport 1.4.0 httplib2 0.22.0 httpx 0.27.2 huggingface-hub 0.26.2 humanize 4.11.0 hyperopt 0.2.7 ibis-framework 9.2.0 idna 3.10 imageio 2.36.0 imageio-ffmpeg 0.5.1 imagesize 1.4.1 imbalanced-learn 0.12.4 imgaug 0.4.0 immutabledict 4.2.0 importlib_metadata 8.5.0 importlib_resources 6.4.5 imutils 0.5.4 inflect 7.4.0 iniconfig 2.0.0 intel-cmplr-lib-ur 2025.0.0 intel-openmp 2025.0.0 ipyevents 2.0.2 ipyfilechooser 0.6.0 ipykernel 5.5.6 ipyleaflet 0.19.2 ipyparallel 8.8.0 ipython 7.34.0 ipython-genutils 0.2.0 ipython-sql 0.5.0 ipytree 0.2.2 ipywidgets 7.7.1 itsdangerous 2.2.0 jax 0.4.33 jax-cuda12-pjrt 0.4.33 jax-cuda12-plugin 0.4.33 jaxlib 0.4.33 jeepney 0.7.1 jellyfish 1.1.0 jieba 0.42.1 Jinja2 3.1.4 jiter 0.7.0 joblib 1.4.2 jsonpatch 1.33 jsonpickle 3.4.2 jsonpointer 3.0.0 jsonschema 4.23.0 jsonschema-specifications 2024.10.1 jupyter-client 6.1.12 jupyter-console 6.1.0 jupyter_core 5.7.2 jupyter-leaflet 0.19.2 jupyter-server 1.24.0 jupyterlab_pygments 0.3.0 jupyterlab_widgets 3.0.13 kaggle 1.6.17 kagglehub 0.3.3 keras 3.4.1 keyring 23.5.0 kiwisolver 1.4.7 langchain 0.3.7 langchain-core 0.3.15 langchain-text-splitters 0.3.2 langcodes 3.4.1 langsmith 0.1.142 language_data 1.2.0 launchpadlib 1.10.16 lazr.restfulclient 0.14.4 lazr.uri 1.0.6 lazy_loader 0.4 libclang 18.1.1 libcudf-cu12 24.10.1 librosa 0.10.2.post1 lightgbm 4.5.0 linkify-it-py 2.0.3 llama_cpp_python 0.3.1 llvmlite 0.43.0 locket 1.0.0 logical-unification 0.4.6 lxml 5.3.0 marisa-trie 1.2.1 Markdown 3.7 markdown-it-py 3.0.0 MarkupSafe 3.0.2 matplotlib 3.8.0 matplotlib-inline 0.1.7 matplotlib-venn 1.1.1 mdit-py-plugins 0.4.2 mdurl 0.1.2 miniKanren 1.0.3 missingno 0.5.2 mistune 3.0.2 mizani 0.13.0 mkl 2025.0.0 ml-dtypes 0.4.1 mlxtend 0.23.2 more-itertools 10.5.0 moviepy 1.0.3 mpmath 1.3.0 msgpack 1.1.0 multidict 6.1.0 multipledispatch 1.0.0 multitasking 0.0.11 murmurhash 1.0.10 music21 9.3.0 namex 0.0.8 natsort 8.4.0 nbclassic 1.1.0 nbclient 0.10.0 nbconvert 7.16.4 nbformat 5.10.4 ndindex 1.9.2 nest-asyncio 1.6.0 networkx 3.4.2 nibabel 5.3.2 nltk 3.9.1 notebook 6.5.5 notebook_shim 0.2.4 numba 0.60.0 numexpr 2.10.1 numpy 1.26.4 nvidia-cublas-cu12 12.6.3.3 nvidia-cuda-cupti-cu12 12.6.80 nvidia-cuda-nvcc-cu12 12.6.77 nvidia-cuda-runtime-cu12 12.6.77 nvidia-cudnn-cu12 9.5.1.17 nvidia-cufft-cu12 11.3.0.4 nvidia-curand-cu12 10.3.7.77 nvidia-cusolver-cu12 11.7.1.2 nvidia-cusparse-cu12 12.5.4.2 nvidia-nccl-cu12 2.23.4 nvidia-nvjitlink-cu12 12.6.77 nvtx 0.2.10 nx-cugraph-cu12 24.10.0 oauth2client 4.1.3 oauthlib 3.2.2 openai 1.54.3 opencv-contrib-python 4.10.0.84 opencv-python 4.10.0.84 opencv-python-headless 4.10.0.84 openpyxl 3.1.5 opentelemetry-api 1.28.0 opentelemetry-sdk 1.28.0 opentelemetry-semantic-conventions 0.49b0 opt_einsum 3.4.0 optax 0.2.3 optree 0.13.0 orbax-checkpoint 0.6.4 orjson 3.10.11 osqp 0.6.7.post3 packaging 24.2 pandas 2.2.2 pandas-datareader 0.10.0 pandas-gbq 0.24.0 pandas-stubs 2.2.2.240909 pandocfilters 1.5.1 panel 1.5.3 param 2.1.1 parso 0.8.4 parsy 2.1 partd 1.4.2 pathlib 1.0.1 patsy 0.5.6 peewee 3.17.7 peft 0.13.2 pexpect 4.9.0 pickleshare 0.7.5 pillow 11.0.0 pip 24.1.2 platformdirs 4.3.6 plotly 5.24.1 plotnine 0.14.1 pluggy 1.5.0 polars 1.9.0 pooch 1.8.2 portpicker 1.5.2 preshed 3.0.9 prettytable 3.12.0 proglog 0.1.10 progressbar2 4.5.0 prometheus_client 0.21.0 promise 2.3 prompt_toolkit 3.0.48 propcache 0.2.0 prophet 1.1.6 proto-plus 1.25.0 protobuf 4.25.5 psutil 5.9.5 psycopg2 2.9.10 ptyprocess 0.7.0 py-cpuinfo 9.0.0 py4j 0.10.9.7 pyarrow 17.0.0 pyarrow-hotfix 0.6 pyasn1 0.6.1 pyasn1_modules 0.4.1 pycocotools 2.0.8 pycparser 2.22 pydantic 2.9.2 pydantic_core 2.23.4 pydata-google-auth 1.8.2 pydot 3.0.2 pydotplus 2.0.2 PyDrive 1.3.1 PyDrive2 1.21.1 pyerfa 2.0.1.4 pygame 2.6.1 pygit2 1.16.0 Pygments 2.18.0 PyGObject 3.42.1 PyJWT 2.9.0 pylibcudf-cu12 24.10.1 pylibcugraph-cu12 24.10.0 pylibraft-cu12 24.10.0 pymc 5.18.0 pymystem3 0.2.0 pynvjitlink-cu12 0.4.0 pyogrio 0.10.0 PyOpenGL 3.1.7 pyOpenSSL 24.2.1 pyparsing 3.2.0 pyperclip 1.9.0 pyproj 3.7.0 pyshp 2.3.1 PySocks 1.7.1 pyspark 3.5.3 pytensor 2.25.5 pytest 8.3.3 python-apt 0.0.0 python-box 7.2.0 python-dateutil 2.8.2 python-louvain 0.16 python-slugify 8.0.4 python-utils 3.9.0 pytz 2024.2 pyviz_comms 3.0.3 PyYAML 6.0.2 pyzmq 24.0.1 qdldl 0.1.7.post4 ratelim 0.1.6 referencing 0.35.1 regex 2024.9.11 requests 2.32.3 requests-oauthlib 1.3.1 requests-toolbelt 1.0.0 requirements-parser 0.9.0 rich 13.9.4 rmm-cu12 24.10.0 rpds-py 0.21.0 rpy2 3.4.2 rsa 4.9 safetensors 0.4.5 scikit-image 0.24.0 scikit-learn 1.5.2 scipy 1.13.1 scooby 0.10.0 scs 3.2.7 seaborn 0.13.2 SecretStorage 3.3.1 Send2Trash 1.8.3 sentence-transformers 3.2.1 sentencepiece 0.2.0 sentry-sdk 2.18.0 setproctitle 1.3.3 setuptools 75.1.0 shap 0.46.0 shapely 2.0.6 shellingham 1.5.4 simple-parsing 0.1.6 six 1.16.0 sklearn-pandas 2.2.0 slicer 0.0.8 smart-open 7.0.5 smmap 5.0.1 sniffio 1.3.1 snowballstemmer 2.2.0 soundfile 0.12.1 soupsieve 2.6 soxr 0.5.0.post1 spacy 3.7.5 spacy-legacy 3.0.12 spacy-loggers 1.0.5 Sphinx 8.1.3 sphinxcontrib-applehelp 2.0.0 sphinxcontrib-devhelp 2.0.0 sphinxcontrib-htmlhelp 2.1.0 sphinxcontrib-jsmath 1.0.1 sphinxcontrib-qthelp 2.0.0 sphinxcontrib-serializinghtml 2.0.0 SQLAlchemy 2.0.36 sqlglot 25.1.0 sqlparse 0.5.1 srsly 2.4.8 stanio 0.5.1 statsmodels 0.14.4 StrEnum 0.4.15 stringzilla 3.10.8 sympy 1.13.1 tables 3.10.1 tabulate 0.9.0 tbb 2022.0.0 tcmlib 1.2.0 tenacity 9.0.0 tensorboard 2.17.0 tensorboard-data-server 0.7.2 tensorflow 2.17.0 tensorflow-datasets 4.9.7 tensorflow-hub 0.16.1 tensorflow-io-gcs-filesystem 0.37.1 tensorflow-metadata 1.13.1 tensorflow-probability 0.24.0 tensorstore 0.1.67 termcolor 2.5.0 terminado 0.18.1 text-unidecode 1.3 textblob 0.17.1 tf_keras 2.17.0 tf-slim 1.1.0 thinc 8.2.5 threadpoolctl 3.5.0 tifffile 2024.9.20 timm 1.0.11 tinycss2 1.4.0 tokenizers 0.20.3 toml 0.10.2 tomli 2.0.2 toolz 0.12.1 torch 2.5.0+cu121 torchaudio 2.5.0+cu121 torchsummary 1.5.1 torchvision 0.20.0+cu121 tornado 6.3.3 tqdm 4.66.6 traitlets 5.7.1 traittypes 0.2.1 transformers 4.46.2 tweepy 4.14.0 typeguard 4.4.1 typer 0.13.0 types-pytz 2024.2.0.20241003 types-setuptools 75.3.0.20241112 typing_extensions 4.12.2 tzdata 2024.2 tzlocal 5.2 uc-micro-py 1.0.3 umf 0.9.0 uritemplate 4.1.1 urllib3 2.2.3 vega-datasets 0.9.0 wadllib 1.3.6 wandb 0.18.6 wasabi 1.1.3 wcwidth 0.2.13 weasel 0.4.1 webcolors 24.8.0 webencodings 0.5.1 websocket-client 1.8.0 Werkzeug 3.1.3 wheel 0.44.0 widgetsnbextension 3.6.10 wordcloud 1.9.3 wrapt 1.16.0 xarray 2024.10.0 xarray-einstats 0.8.0 xgboost 2.1.2 xlrd 2.0.1 xyzservices 2024.9.0 yarl 1.17.1 yellowbrick 1.5 yfinance 0.2.48 zipp 3.20.2