diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..2b07fb8 --- /dev/null +++ b/.gitignore @@ -0,0 +1,4 @@ +.venv +.env + +chroma_db_LAB \ No newline at end of file diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..99d807c --- /dev/null +++ b/requirements.txt @@ -0,0 +1,175 @@ +aiohappyeyeballs==2.6.1 +aiohttp==3.13.3 +aiosignal==1.4.0 +annotated-doc==0.0.4 +annotated-types==0.7.0 +anyio==4.12.1 +asttokens==3.0.1 +attrs==25.4.0 +backoff==2.2.1 +bcrypt==5.0.0 +build==1.4.0 +certifi==2026.2.25 +charset-normalizer==3.4.4 +chromadb==1.5.1 +click==8.3.1 +comm==0.2.3 +cuda-bindings==12.9.4 +cuda-pathfinder==1.4.0 +dataclasses-json==0.6.7 +debugpy==1.8.20 +decorator==5.2.1 +distro==1.9.0 +durationpy==0.10 +executing==2.2.1 +filelock==3.24.3 +flatbuffers==25.12.19 +frozenlist==1.8.0 +fsspec==2026.2.0 +googleapis-common-protos==1.72.0 +greenlet==3.3.2 +grpcio==1.78.0 +h11==0.16.0 +hf-xet==1.3.1 +httpcore==1.0.9 +httptools==0.7.1 +httpx==0.28.1 +httpx-sse==0.4.3 +huggingface-hub==0.36.2 +idna==3.11 +importlib-metadata==8.7.1 +importlib-resources==6.5.2 +ipykernel==7.2.0 +ipython==9.10.0 +ipython-pygments-lexers==1.1.1 +jedi==0.19.2 +jinja2==3.1.6 +jiter==0.13.0 +joblib==1.5.3 +jsonpatch==1.33 +jsonpointer==3.0.0 +jsonschema==4.26.0 +jsonschema-specifications==2025.9.1 +jupyter-client==8.8.0 +jupyter-core==5.9.1 +kubernetes==35.0.0 +langchain==1.2.10 +langchain-chroma==1.1.0 +langchain-classic==1.0.1 +langchain-community==0.4.1 +langchain-core==1.2.16 +langchain-huggingface==1.2.0 +langchain-openai==1.1.10 +langchain-text-splitters==1.1.1 +langgraph==1.0.9 +langgraph-checkpoint==4.0.0 +langgraph-prebuilt==1.0.8 +langgraph-sdk==0.3.9 +langsmith==0.7.7 +markdown-it-py==4.0.0 +markupsafe==3.0.3 +marshmallow==3.26.2 +matplotlib-inline==0.2.1 +mdurl==0.1.2 +mmh3==5.2.0 +mpmath==1.3.0 +multidict==6.7.1 +mypy-extensions==1.1.0 +nest-asyncio==1.6.0 +networkx==3.6.1 +numpy==2.4.2 +nvidia-cublas-cu12==12.8.4.1 
+nvidia-cuda-cupti-cu12==12.8.90 +nvidia-cuda-nvrtc-cu12==12.8.93 +nvidia-cuda-runtime-cu12==12.8.90 +nvidia-cudnn-cu12==9.10.2.21 +nvidia-cufft-cu12==11.3.3.83 +nvidia-cufile-cu12==1.13.1.3 +nvidia-curand-cu12==10.3.9.90 +nvidia-cusolver-cu12==11.7.3.90 +nvidia-cusparse-cu12==12.5.8.93 +nvidia-cusparselt-cu12==0.7.1 +nvidia-nccl-cu12==2.27.5 +nvidia-nvjitlink-cu12==12.8.93 +nvidia-nvshmem-cu12==3.4.5 +nvidia-nvtx-cu12==12.8.90 +oauthlib==3.3.1 +onnxruntime==1.24.2 +openai==2.24.0 +opentelemetry-api==1.39.1 +opentelemetry-exporter-otlp-proto-common==1.39.1 +opentelemetry-exporter-otlp-proto-grpc==1.39.1 +opentelemetry-proto==1.39.1 +opentelemetry-sdk==1.39.1 +opentelemetry-semantic-conventions==0.60b1 +orjson==3.11.7 +ormsgpack==1.12.2 +overrides==7.7.0 +packaging==26.0 +parso==0.8.6 +pexpect==4.9.0 +platformdirs==4.9.2 +posthog==5.4.0 +prompt-toolkit==3.0.52 +propcache==0.4.1 +protobuf==6.33.5 +psutil==7.2.2 +ptyprocess==0.7.0 +pure-eval==0.2.3 +pybase64==1.4.3 +pydantic==2.12.5 +pydantic-core==2.41.5 +pydantic-settings==2.13.1 +pygments==2.19.2 +pypdf==6.7.4 +pypika==0.51.1 +pyproject-hooks==1.2.0 +python-dateutil==2.9.0.post0 +python-dotenv==1.2.1 +pyyaml==6.0.3 +pyzmq==27.1.0 +referencing==0.37.0 +regex==2026.2.19 +requests==2.32.5 +requests-oauthlib==2.0.0 +requests-toolbelt==1.0.0 +rich==14.3.3 +rpds-py==0.30.0 +safetensors==0.7.0 +scikit-learn==1.8.0 +scipy==1.17.1 +sentence-transformers==5.2.3 +setuptools==82.0.0 +shellingham==1.5.4 +six==1.17.0 +sniffio==1.3.1 +sqlalchemy==2.0.47 +stack-data==0.6.3 +sympy==1.14.0 +tenacity==9.1.4 +termcolor==3.3.0 +threadpoolctl==3.6.0 +tiktoken==0.12.0 +tokenizers==0.22.2 +torch==2.10.0 +tornado==6.5.4 +tqdm==4.67.3 +traitlets==5.14.3 +transformers==4.57.6 +triton==3.6.0 +typer==0.24.1 +typing-extensions==4.15.0 +typing-inspect==0.9.0 +typing-inspection==0.4.2 +urllib3==2.6.3 +uuid-utils==0.14.1 +uvicorn==0.41.0 +uvloop==0.22.1 +watchfiles==1.1.1 +wcwidth==0.6.0 +websocket-client==1.9.0 +websockets==16.0 +xxhash==3.6.0 
+yarl==1.22.0 +zipp==3.23.0 +zstandard==0.25.0 diff --git a/your-code/main.ipynb b/your-code/main.ipynb index e3a225a..faefefb 100644 --- a/your-code/main.ipynb +++ b/your-code/main.ipynb @@ -59,27 +59,232 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\u001b[2mUsing Python 3.12.12 environment at: /var/home/anne/Documents/_Ironhack/lab-intro-rag/.venv\u001b[0m\n", + "\u001b[2K\u001b[2mResolved \u001b[1m54 packages\u001b[0m \u001b[2min 136ms\u001b[0m\u001b[0m \u001b[0m\n", + "\u001b[2K\u001b[37m⠙\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1) \n", + "\u001b[2K\u001b[1A\u001b[37m⠙\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)-------------------\u001b[0m\u001b[0m 0 B/323.73 KiB \u001b[1A\n", + "\u001b[2K\u001b[1A\u001b[37m⠙\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)-------------------\u001b[0m\u001b[0m 14.92 KiB/323.73 KiB \u001b[1A\n", + "\u001b[2K\u001b[1A\u001b[37m⠙\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)-------------------\u001b[0m\u001b[0m 30.92 KiB/323.73 KiB \u001b[1A\n", + "\u001b[2K\u001b[1A\u001b[37m⠙\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)-------------------\u001b[0m\u001b[0m 46.92 KiB/323.73 KiB \u001b[1A\n", + "\u001b[2K\u001b[1A\u001b[37m⠙\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)-------------------\u001b[0m\u001b[0m 62.92 KiB/323.73 KiB \u001b[1A\n", + "\u001b[2K\u001b[1A\u001b[37m⠙\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)-------------------\u001b[0m\u001b[0m 78.92 KiB/323.73 KiB \u001b[1A\n", + "\u001b[2K\u001b[1A\u001b[37m⠙\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)-------------------\u001b[0m\u001b[0m 94.92 KiB/323.73 KiB \u001b[1A\n", + "\u001b[2K\u001b[1A\u001b[37m⠙\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)-------------------\u001b[0m\u001b[0m 110.92 KiB/323.73 KiB \u001b[1A\n", + 
"\u001b[2K\u001b[1A\u001b[37m⠙\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)-------------------\u001b[0m\u001b[0m 126.92 KiB/323.73 KiB \u001b[1A\n", + "\u001b[2K\u001b[1A\u001b[37m⠙\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)2m-----------------\u001b[0m\u001b[0m 142.92 KiB/323.73 KiB \u001b[1A\n", + "\u001b[2K\u001b[1A\u001b[37m⠙\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)[2m----------------\u001b[0m\u001b[0m 158.92 KiB/323.73 KiB \u001b[1A\n", + "\u001b[2K\u001b[1A\u001b[37m⠙\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)m\u001b[2m--------------\u001b[0m\u001b[0m 174.92 KiB/323.73 KiB \u001b[1A\n", + "\u001b[2K\u001b[1A\u001b[37m⠙\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)0m\u001b[2m-------------\u001b[0m\u001b[0m 190.92 KiB/323.73 KiB \u001b[1A\n", + "\u001b[2K\u001b[1A\u001b[37m⠙\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)[30m\u001b[2m-----------\u001b[0m\u001b[0m 206.92 KiB/323.73 KiB \u001b[1A\n", + "\u001b[2K\u001b[1A\u001b[37m⠙\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)\u001b[30m\u001b[2m----------\u001b[0m\u001b[0m 222.92 KiB/323.73 KiB \u001b[1A\n", + "\u001b[2K\u001b[1A\u001b[37m⠙\u001b[0m \u001b[2mPreparing packages...\u001b[0m (0/1)--\u001b[30m\u001b[2m--------\u001b[0m\u001b[0m 238.92 KiB/323.73 KiB \u001b[1A\n", + "\u001b[2K\u001b[2mPrepared \u001b[1m1 package\u001b[0m \u001b[2min 26ms\u001b[0m\u001b[0m \u001b[1A\n", + "\u001b[2K\u001b[2mInstalled \u001b[1m53 packages\u001b[0m \u001b[2min 67ms\u001b[0m\u001b[0m \u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1maiohappyeyeballs\u001b[0m\u001b[2m==2.6.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1maiohttp\u001b[0m\u001b[2m==3.13.3\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1maiosignal\u001b[0m\u001b[2m==1.4.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mannotated-types\u001b[0m\u001b[2m==0.7.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1manyio\u001b[0m\u001b[2m==4.12.1\u001b[0m\n", + " \u001b[32m+\u001b[39m 
\u001b[1mattrs\u001b[0m\u001b[2m==25.4.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mcertifi\u001b[0m\u001b[2m==2026.2.25\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mcharset-normalizer\u001b[0m\u001b[2m==3.4.4\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mdataclasses-json\u001b[0m\u001b[2m==0.6.7\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mfrozenlist\u001b[0m\u001b[2m==1.8.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mgreenlet\u001b[0m\u001b[2m==3.3.2\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mh11\u001b[0m\u001b[2m==0.16.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mhttpcore\u001b[0m\u001b[2m==1.0.9\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mhttpx\u001b[0m\u001b[2m==0.28.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mhttpx-sse\u001b[0m\u001b[2m==0.4.3\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1midna\u001b[0m\u001b[2m==3.11\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mjsonpatch\u001b[0m\u001b[2m==1.33\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mjsonpointer\u001b[0m\u001b[2m==3.0.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mlangchain\u001b[0m\u001b[2m==1.2.10\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mlangchain-classic\u001b[0m\u001b[2m==1.0.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mlangchain-community\u001b[0m\u001b[2m==0.4.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mlangchain-core\u001b[0m\u001b[2m==1.2.16\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mlangchain-text-splitters\u001b[0m\u001b[2m==1.1.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mlanggraph\u001b[0m\u001b[2m==1.0.9\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mlanggraph-checkpoint\u001b[0m\u001b[2m==4.0.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mlanggraph-prebuilt\u001b[0m\u001b[2m==1.0.8\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mlanggraph-sdk\u001b[0m\u001b[2m==0.3.9\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mlangsmith\u001b[0m\u001b[2m==0.7.7\u001b[0m\n", + " \u001b[32m+\u001b[39m 
\u001b[1mmarshmallow\u001b[0m\u001b[2m==3.26.2\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mmultidict\u001b[0m\u001b[2m==6.7.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mmypy-extensions\u001b[0m\u001b[2m==1.1.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mnumpy\u001b[0m\u001b[2m==2.4.2\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1morjson\u001b[0m\u001b[2m==3.11.7\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mormsgpack\u001b[0m\u001b[2m==1.12.2\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mpropcache\u001b[0m\u001b[2m==0.4.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mpydantic\u001b[0m\u001b[2m==2.12.5\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mpydantic-core\u001b[0m\u001b[2m==2.41.5\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mpydantic-settings\u001b[0m\u001b[2m==2.13.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mpypdf\u001b[0m\u001b[2m==6.7.4\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mpython-dotenv\u001b[0m\u001b[2m==1.2.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mpyyaml\u001b[0m\u001b[2m==6.0.3\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mrequests\u001b[0m\u001b[2m==2.32.5\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mrequests-toolbelt\u001b[0m\u001b[2m==1.0.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1msqlalchemy\u001b[0m\u001b[2m==2.0.47\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mtenacity\u001b[0m\u001b[2m==9.1.4\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mtyping-extensions\u001b[0m\u001b[2m==4.15.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mtyping-inspect\u001b[0m\u001b[2m==0.9.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mtyping-inspection\u001b[0m\u001b[2m==0.4.2\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1murllib3\u001b[0m\u001b[2m==2.6.3\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1muuid-utils\u001b[0m\u001b[2m==0.14.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mxxhash\u001b[0m\u001b[2m==3.6.0\u001b[0m\n", + " \u001b[32m+\u001b[39m 
\u001b[1myarl\u001b[0m\u001b[2m==1.22.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mzstandard\u001b[0m\u001b[2m==0.25.0\u001b[0m\n", + "\u001b[2mUsing Python 3.12.12 environment at: /var/home/anne/Documents/_Ironhack/lab-intro-rag/.venv\u001b[0m\n", + "\u001b[2K\u001b[2mResolved \u001b[1m125 packages\u001b[0m \u001b[2min 123ms\u001b[0m\u001b[0m \u001b[0m\n", + "\u001b[2K\u001b[2mInstalled \u001b[1m93 packages\u001b[0m \u001b[2min 269ms\u001b[0m\u001b[0m \u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mannotated-doc\u001b[0m\u001b[2m==0.0.4\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mbackoff\u001b[0m\u001b[2m==2.2.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mbcrypt\u001b[0m\u001b[2m==5.0.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mbuild\u001b[0m\u001b[2m==1.4.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mchromadb\u001b[0m\u001b[2m==1.5.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mclick\u001b[0m\u001b[2m==8.3.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mcuda-bindings\u001b[0m\u001b[2m==12.9.4\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mcuda-pathfinder\u001b[0m\u001b[2m==1.4.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mdistro\u001b[0m\u001b[2m==1.9.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mdurationpy\u001b[0m\u001b[2m==0.10\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mfilelock\u001b[0m\u001b[2m==3.24.3\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mflatbuffers\u001b[0m\u001b[2m==25.12.19\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mfsspec\u001b[0m\u001b[2m==2026.2.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mgoogleapis-common-protos\u001b[0m\u001b[2m==1.72.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mgrpcio\u001b[0m\u001b[2m==1.78.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mhf-xet\u001b[0m\u001b[2m==1.3.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mhttptools\u001b[0m\u001b[2m==0.7.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mhuggingface-hub\u001b[0m\u001b[2m==0.36.2\u001b[0m\n", + " 
\u001b[32m+\u001b[39m \u001b[1mimportlib-metadata\u001b[0m\u001b[2m==8.7.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mimportlib-resources\u001b[0m\u001b[2m==6.5.2\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mjinja2\u001b[0m\u001b[2m==3.1.6\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mjiter\u001b[0m\u001b[2m==0.13.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mjoblib\u001b[0m\u001b[2m==1.5.3\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mjsonschema\u001b[0m\u001b[2m==4.26.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mjsonschema-specifications\u001b[0m\u001b[2m==2025.9.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mkubernetes\u001b[0m\u001b[2m==35.0.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mlangchain-chroma\u001b[0m\u001b[2m==1.1.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mlangchain-huggingface\u001b[0m\u001b[2m==1.2.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mlangchain-openai\u001b[0m\u001b[2m==1.1.10\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mmarkdown-it-py\u001b[0m\u001b[2m==4.0.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mmarkupsafe\u001b[0m\u001b[2m==3.0.3\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mmdurl\u001b[0m\u001b[2m==0.1.2\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mmmh3\u001b[0m\u001b[2m==5.2.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mmpmath\u001b[0m\u001b[2m==1.3.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mnetworkx\u001b[0m\u001b[2m==3.6.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mnvidia-cublas-cu12\u001b[0m\u001b[2m==12.8.4.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mnvidia-cuda-cupti-cu12\u001b[0m\u001b[2m==12.8.90\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mnvidia-cuda-nvrtc-cu12\u001b[0m\u001b[2m==12.8.93\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mnvidia-cuda-runtime-cu12\u001b[0m\u001b[2m==12.8.90\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mnvidia-cudnn-cu12\u001b[0m\u001b[2m==9.10.2.21\u001b[0m\n", + " \u001b[32m+\u001b[39m 
\u001b[1mnvidia-cufft-cu12\u001b[0m\u001b[2m==11.3.3.83\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mnvidia-cufile-cu12\u001b[0m\u001b[2m==1.13.1.3\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mnvidia-curand-cu12\u001b[0m\u001b[2m==10.3.9.90\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mnvidia-cusolver-cu12\u001b[0m\u001b[2m==11.7.3.90\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mnvidia-cusparse-cu12\u001b[0m\u001b[2m==12.5.8.93\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mnvidia-cusparselt-cu12\u001b[0m\u001b[2m==0.7.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mnvidia-nccl-cu12\u001b[0m\u001b[2m==2.27.5\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mnvidia-nvjitlink-cu12\u001b[0m\u001b[2m==12.8.93\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mnvidia-nvshmem-cu12\u001b[0m\u001b[2m==3.4.5\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mnvidia-nvtx-cu12\u001b[0m\u001b[2m==12.8.90\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1moauthlib\u001b[0m\u001b[2m==3.3.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1monnxruntime\u001b[0m\u001b[2m==1.24.2\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mopenai\u001b[0m\u001b[2m==2.24.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mopentelemetry-api\u001b[0m\u001b[2m==1.39.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mopentelemetry-exporter-otlp-proto-common\u001b[0m\u001b[2m==1.39.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mopentelemetry-exporter-otlp-proto-grpc\u001b[0m\u001b[2m==1.39.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mopentelemetry-proto\u001b[0m\u001b[2m==1.39.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mopentelemetry-sdk\u001b[0m\u001b[2m==1.39.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mopentelemetry-semantic-conventions\u001b[0m\u001b[2m==0.60b1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1moverrides\u001b[0m\u001b[2m==7.7.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mposthog\u001b[0m\u001b[2m==5.4.0\u001b[0m\n", + " \u001b[32m+\u001b[39m 
\u001b[1mprotobuf\u001b[0m\u001b[2m==6.33.5\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mpybase64\u001b[0m\u001b[2m==1.4.3\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mpypika\u001b[0m\u001b[2m==0.51.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mpyproject-hooks\u001b[0m\u001b[2m==1.2.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mreferencing\u001b[0m\u001b[2m==0.37.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mregex\u001b[0m\u001b[2m==2026.2.19\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mrequests-oauthlib\u001b[0m\u001b[2m==2.0.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mrich\u001b[0m\u001b[2m==14.3.3\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mrpds-py\u001b[0m\u001b[2m==0.30.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1msafetensors\u001b[0m\u001b[2m==0.7.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mscikit-learn\u001b[0m\u001b[2m==1.8.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mscipy\u001b[0m\u001b[2m==1.17.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1msentence-transformers\u001b[0m\u001b[2m==5.2.3\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1msetuptools\u001b[0m\u001b[2m==82.0.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mshellingham\u001b[0m\u001b[2m==1.5.4\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1msniffio\u001b[0m\u001b[2m==1.3.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1msympy\u001b[0m\u001b[2m==1.14.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mtermcolor\u001b[0m\u001b[2m==3.3.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mthreadpoolctl\u001b[0m\u001b[2m==3.6.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mtiktoken\u001b[0m\u001b[2m==0.12.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mtokenizers\u001b[0m\u001b[2m==0.22.2\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mtorch\u001b[0m\u001b[2m==2.10.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mtqdm\u001b[0m\u001b[2m==4.67.3\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mtransformers\u001b[0m\u001b[2m==4.57.6\u001b[0m\n", + " 
\u001b[32m+\u001b[39m \u001b[1mtriton\u001b[0m\u001b[2m==3.6.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mtyper\u001b[0m\u001b[2m==0.24.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1muvicorn\u001b[0m\u001b[2m==0.41.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1muvloop\u001b[0m\u001b[2m==0.22.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mwatchfiles\u001b[0m\u001b[2m==1.1.1\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mwebsocket-client\u001b[0m\u001b[2m==1.9.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mwebsockets\u001b[0m\u001b[2m==16.0\u001b[0m\n", + " \u001b[32m+\u001b[39m \u001b[1mzipp\u001b[0m\u001b[2m==3.23.0\u001b[0m\n" + ] + } + ], "source": [ - "%pip install langchain langchain_community pypdf\n", - "%pip install termcolor langchain_openai langchain-huggingface sentence-transformers chromadb langchain_chroma tiktoken openai python-dotenv\n" + "! uv pip install langchain langchain_community pypdf\n", + "! uv pip install termcolor langchain_openai langchain-huggingface sentence-transformers chromadb langchain_chroma tiktoken openai python-dotenv" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "1.2.10\n" + ] + } + ], + "source": [ + "import langchain\n", + "print(langchain.__version__)" + ] + }, + { + "cell_type": "code", + "execution_count": 6, "metadata": { "id": "6heKZkQUxYZr" }, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/var/home/anne/Documents/_Ironhack/lab-intro-rag/.venv/lib/python3.12/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. 
See https://ipywidgets.readthedocs.io/en/stable/user_install.html\n", + " from .autonotebook import tqdm as notebook_tqdm\n" + ] + } + ], "source": [ - "import os\n", - "from langchain.document_loaders import PyPDFLoader\n", - "from langchain.text_splitter import CharacterTextSplitter, RecursiveCharacterTextSplitter\n", + "from langchain_community.document_loaders import PyPDFLoader\n", + "from langchain_text_splitters import RecursiveCharacterTextSplitter\n", + "from langchain_openai import OpenAIEmbeddings, ChatOpenAI\n", + "from langchain_chroma import Chroma\n", "import warnings\n", - "warnings.filterwarnings('ignore')\n" + "warnings.filterwarnings('ignore')" ] }, { @@ -96,7 +301,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, "metadata": { "id": "cuREtJRixYZt" }, @@ -104,7 +309,7 @@ "source": [ "# File path for the document\n", "\n", - "file_path = \"LAB/ai-for-everyone.pdf\"" + "file_path = \"../ai-for-everyone.pdf\"" ] }, { @@ -122,12 +327,23 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 8, "metadata": { "id": "_b5Z_45UxYZu", "outputId": "a600d69f-14fe-4492-f236-97261d6ff36c" }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "297" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# Load and split the document\n", "loader = PyPDFLoader(file_path)\n", @@ -168,9 +384,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 9, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "1096" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "text_splitter = RecursiveCharacterTextSplitter(\n", " chunk_size=1000,\n", @@ -285,37 +512,49 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 11, "metadata": { "id": "L0xDxElwxYZw" }, "outputs": [], "source": [ - "from langchain.embeddings import 
OpenAIEmbeddings\n", "from dotenv import load_dotenv" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 12, "metadata": { "id": "_WRIo3_0xYZx", "outputId": "78bfbbf3-9d25-4e31-bdbc-3e932e6bbfec" }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "load_dotenv()" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 13, "metadata": { "id": "MNZfTng5xYZz", "outputId": "db1a7c85-ef9f-447e-92cd-9d097e959847" }, "outputs": [], "source": [ + "import os\n", + "\n", "api_key = os.getenv(\"OPENAI_API_KEY\")\n", "embeddings = OpenAIEmbeddings(model=\"text-embedding-3-large\")" ] @@ -343,23 +582,20 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": { - "id": "brKe6wUgxYZ0" - }, - "outputs": [], - "source": [ - "from langchain.vectorstores import Chroma" - ] - }, - { - "cell_type": "code", - "execution_count": null, + "execution_count": 15, "metadata": { "id": "VkjHR-RkxYZ0", "outputId": "bc11bda9-f283-457a-f584-5a06b95c4dd9" }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "ChromaDB created with document embeddings.\n" + ] + } + ], "source": [ "db = Chroma.from_documents(chunks, embeddings, persist_directory=\"./chroma_db_LAB\")\n", "print(\"ChromaDB created with document embeddings.\")" @@ -383,24 +619,74 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 18, "metadata": { "id": "XiLv-TfrxYZ1" }, "outputs": [], "source": [ - "user_question = \"\" # User question\n", + "user_question = \"What is the importance of AI today?\" # User question\n", "retrieved_docs = db.similarity_search(user_question, k=10) # k is the number of documents to retrieve" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 17, "metadata": { "id": "qgWsh50JxYZ1", "outputId": 
"c8640c5d-5955-471f-fdd2-37096f5f68c7" }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Document 1:\n", + "will be needed to address the various challenges with regards to \n", + "the development of artificial intelligence. Which formal method can be used \n", + "to test for algorithmic bias? Can we identify simple to use measures to assess \n", + "bias, similar to the way we assess reliability and validity? What is the best way to \n", + "bridge (deep) learning and privacy? Should learning be conducted on the user \n", + "side (with algorithms requiring new data)? Or should data be transferred to a \n", + "trusted intermediary who performs the analysis on behalf of firms? Do users \n", + "need to be compensated in one way or another for data or resources provided? \n", + "Moreover, how can the refusal to share data lead to biases in the data available \n", + "for learning? Which data sources can and should be used for algorithmic learn-\n", + "ing? Are there certain types of data that should be ‘off-limits’? What role will \n", + "interdisciplinary AI teams play in establishing coexistence between humans\n", + "Document 2:\n", + "0. Contextualizing Platform Labor. Contracampo, \n", + "39 (1), 1–10.\n", + "Grohmann, R., Carelli, R., Abs, D., Salvagni, J., Howson, K., Ustek-Spilda, F . \n", + "and Graham, M. 2020. The Uprising of Brazilian Food Delivery Riders. \n", + "Fairwork Website, 10 August 2020. https://fair.work/the-uprising-of-brazilian \n", + "-food-delivery-riders\n", + "Hewson, C. 2014. Qualitative Approaches in Internet-mediated Research: \n", + "Opportunities, Issues, Possibilities. The Oxford Handbook of Qualitative \n", + "Research, , pp. 423, 451. New Y ork: Oxford University Press.\n", + "Huws, U. 2014. Labor in the Global Digital Economy . New Y ork: Monthly \n", + "Review Press.\n", + "Introna, L. 2016. Algorithms, Governance, and Governmentality: On Gover -\n", + "ning Academic Writing. 
Science, Technology, & Human Values, 41 (1), 17–49.\n", + "Irani, L. 2015. The Cultural Work of Microwork. New Media & Society, 17 (5), \n", + "1–15.\n", + "Kalil, R. 2019. Capitalismo de plataforma e direito do trabalho: Crowdwork e\n", + "Document 3:\n", + "applications), combined with the availabil-\n", + "ity of powerful and almost limitless computing capacity and advancements in \n", + "machine learning and deep learning is why we are currently in another period \n", + "of AI optimism and hype (Elish and boyd 2018). \n", + "Given the concepts and the brief discussion above, how can we agree on an \n", + "operational definition of AI? A basic definition would be to refer to AI as com-\n", + "puter programming that learns from and adapts to data. A more elaborate ver-\n", + "sion of this, as Elliott (2019: 4) puts it, defines AI as ‘any computational system \n", + "that can sense its relevant context and react intelligently to data’ in order to \n", + "perform highly complex tasks effectively and to achieve specific goals, thereby \n", + "mimicking intelligent human behaviours. The discussion about how to define \n", + "AI cannot be settled in one definition, let alone one book. 
It is an important\n" + ] + } + ], "source": [ "# Display top results\n", "for i, doc in enumerate(retrieved_docs[:3]): # Display top 3 results\n", @@ -418,7 +704,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 19, "metadata": { "id": "2iB3lZqHxYZ2" }, @@ -434,18 +720,46 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 20, "metadata": { "id": "2okzmuADxYZ2", "outputId": "0aa6cdca-188d-40e0-f5b4-8888d3549ea4" }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Context formatted for GPT model.\n" + ] + } + ], "source": [ "# Generate a formatted context from the retrieved documents\n", "formatted_context = _get_document_prompt(retrieved_docs)\n", "print(\"Context formatted for GPT model.\")" ] }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "'\\n\\nContent:\\n162 AI for Everyone?\\nToday, thanks to the rise of big data, computing power and mathematical \\nadvancements, and the introduction of convolutional neural networks (CNNs), \\nwe live with intelligent algorithms (i.e. weak AI), in many aspects of life. 1 For \\nexample, the effects of these algorithms in digital visual production covers rec-\\nommendation systems, automatic image editing, analysing and even creating \\nnew images, but these are not recognised as ‘intelligent’ systems (Manovich \\n2020). What fascinates the human mind are still the observances of failures. \\nA prominent example is the images and videos created with the Deep Dream \\nalgorithm, which was originally devised to unearth what lies in the hidden lay-\\ners of CNNs to capture the workings and failures of the system (Simonyan and \\nZisserman 2014). 
These images are hailed by some as artworks on their own \\n(Miller 2019).\\nAutonomous AI systems such as self-driving cars, or autonomous lethal\\n\\n\\nContent:\\ntational power to develop AI systems, are financially strong enough to hire the \\nbrightest AI talent and have access to the gigantic datasets that are needed to \\ntrain machine learning and deep learning (AI) models. This context makes it \\nvery clear why we need to ask critical questions about AI and power.\\nConceptualising AI – What AI Are We Talking About?\\nBefore understanding what type of AI we want, we need to understand what AI \\nwe have. This is an area of significant debate, and the book opens by exploring \\nthe varying approaches to how we define AI. \\nThe Origins of AI\\nIt is easy to forget that AI has been with us for more than 60 years. Despite \\nthe flash of excitement and anxiety that feels so recent, AI itself is not a new \\nphenomenon. The name Artificial Intelligence (AI) was coined in the mid-\\n1950s at a series of academic workshops organised at Dartmouth College, New \\nHampshire (USA). A group of scientists, led by mathematics professor John\\n\\n\\nContent:\\n1950s at a series of academic workshops organised at Dartmouth College, New \\nHampshire (USA). A group of scientists, led by mathematics professor John \\nMcCarthy, gathered to investigate the ways in which machines could simulate \\naspects of human intelligence: the ability to learn and make decisions. Their \\ncore assumption was that human reasoning could be reconstructed using \\nmathematical techniques and, as a consequence, problem-solving could be \\nformalised into algorithms (McCarthy et al. 1955/2006).\\nWhat is more recent is a reflexive, if not critical, and social-scientific, under-\\nstanding of not just AI’s capabilities, but its impacts on human life and social \\norganisation (Elliott 2019). 
It took decades for AI research to move from \\nwhat it could do for us to what it could do to us, or enable us to do to each \\nother. These first critical insights came along with observations that AI can \\nnot only supercharge innovation and bring about economic prosperity but also\\n\\n\\nContent:\\nIntroduction: Why We Need Critical Perspectives on AI 5\\npotential to surpass human intellectual capacities. This can be seen as the attempt \\nto mechanise human-level intelligence. Computer scientists and philosophers \\ndisagree on whether this is at all possible (Coeckelbergh 2020): some directly \\nreject this scenario while others think if theoretically possible, it is not likely to \\nhappen (soon) in practice (Boden 2016). This is why it might be better to focus \\non advancements in weak AI or ANI (Artificial Narrow Intelligence), as this is \\nthe type of AI already impacting everyday life on a massive scale. Weak/nar -\\nrow AI performs specific tasks which would normally require intelligence in a \\nhuman being – machines aiding human thought and action. This type of AI is a \\nmathematical method for prediction (Agrawal et al. 2018). Such systems can be \\nextremely powerful but are limited in the range of tasks they can perform.\\n\\n\\nContent:\\nThe Case of Artificial Intelligence\\nFor some years now, technical solutions utilising artificial intelligence are \\nwidely seen as means to tackle many fundamental problems of mankind. From \\nfighting the climate crisis, tackling the problems of ageing societies, reducing \\nglobal poverty, stopping terror, detecting copyright infringements or curing \\ncancer to improving evidence-based politics, improving predictive police work, \\nlocal transportation, self-driving cars and even waste removal.\\nDefinitions\\nThe first step towards a meaningful discussion about AI would be to define \\nwhat exactly one means when talking about AI. 
Historically there have been \\ntwo major understandings of AI: strong AI or Artificial General Intelligence \\n(AGI) and weak AI or Artificial Narrow Intelligence (ANI). The goal of AGI is \\nthe creation of an artificial human like intelligence, so an AI system with true \\nhuman-like intelligence including perception, agency, consciousness, inten -\\n\\n\\nContent:\\n22 AI for Everyone?\\nClearly, humans will need to coexist with machines. Jobs traditionally done by \\nhumans will be shifted towards AI systems. Artificial intelligence is already able \\nto translate languages, diagnose illnesses, assist in retail (Kaplan 2020c), and \\nthe like – in several cases, better than the human workforce. Human jobs might \\nbe created in the future that are unimaginable now, similar to the fact that \\nnobody really predicted the job of mobile app designers just a few years ago.\\nIn this world, AI would rather be augmenting and complementing – rather \\nthan replacing – humans in their work. In the pessimistic case, i.e., massive \\nunemployment, ideas such as universal basic income are already being dis -\\ncussed. Fundamental philosophical questions would need to be answered sur-\\nrounding life for humans when most of our work is done by AI systems. In \\nany case, the State will certainly have to come up with a set of rules govern -\\n\\n\\nContent:\\n152 AI for Everyone?\\nThe vision could not highlight in a more striking way how AI becomes the \\nsolution for humanity’s biggest challenge. The following two paragraphs taken \\nfrom the two official communications (European Commission 2018a) could \\nnot be clearer:\\nAI is helping us to solve some of the world’s biggest challenges: from \\ntreating chronic diseases or reducing fatality rates in traffic accidents to \\nfighting climate change or anticipating cybersecurity threats. (ibid. 
2)\\nIn more evocative terms, the myth of the revolutionary character of AI is rein-\\nforced by a comparison with the ‘steam’ and electricity ‘revolution’ .\\nLike the steam engine or electricity in the past, AI is transforming our \\nworld, our society and our industry. Growth in computing power, avail-\\nability of data and progress in algorithms have turned AI into one of the \\nmost strategic technologies of the 21st century. (ibid. 2)\\nThe High-Level Expert Group on Artificial Intelligence (AI HLEG) goes into\\n\\n\\nContent:\\nredundant. As such, in our above example, the individual would not need to \\nprepare for work anymore, as this could be done entirely by the ASI-powered \\nmachine or robot (Kaplan and Haenlein 2019). For a detailed discussion on the \\nevolution of AI systems, we refer to Huang and Rust (2018).\\nArtificial Intelligence: Machines and Humans\\nIn the future, artificial intelligence will raise several challenges, and humans \\nwill have to learn to coexist with machines and robots. Pushed by the global \\nCOVID-19 health crisis, it is clear that AI will deeply impact societies around \\nthe world (Kaplan 2021). We will discuss some of these questions, looking at \\nchallenges in terms of algorithms and individual organisations; the employ -\\nment market; and last but not least, democracy and human freedom potentially \\nat stake due to advances in AI.\\nAbout Algorithms and Organisations\\nWhen machines and humans coexist, it is important that both do what they are\\n\\n\\nContent:\\nWe might have reached AI’s autumn, completing the four seasons of arti -\\nficial intelligence (Haenlein and Kaplan 2019), as a result of computational \\nstrength having constantly increased over recent years, rendering deep learn -\\ning and artificial neural networks possible (Libai et al. 2020). 
This new era of \\nAI is said to have begun in 2015 when AlphaGo, a computer program designed \\nby Google, beat a (human) world champion in the Chinese board game Go. \\nThis event made the news around the world, and regenerated hype around the \\ndomain of artificial intelligence.\\nThis hype might continue for quite some time, as we are currently only expe-\\nriencing so-called first-generation AI applications, usually referred to as arti -\\nficial narrow intelligence (ANI). Within such systems, AI is only applied to \\nvery specific tasks such as choosing which news items it will tell an individual \\nduring his or her morning before-work routine based on the individual’s intel-\\nlectual preferences.\\n\\n\\nContent:\\nmost strategic technologies of the 21st century. (ibid. 2)\\nThe High-Level Expert Group on Artificial Intelligence (AI HLEG) goes into \\neven greater detail about the capabilities of AI to make humanity ‘flourish’ , thus \\nsolving all problems of society.\\nWe believe that AI has the potential to significantly transform society. AI \\nis not an end in itself, but rather a promising means to increase human \\nflourishing, thereby enhancing individual and societal well-being and \\nthe common good, as well as bringing progress and innovation. In par-\\nticular, AI systems can help to facilitate the achievement of the UN’s \\nSustainable Development Goals, such as promoting gender balance \\nand tackling climate change, rationalising our use of natural resources, \\nenhancing our health, mobility and production processes, and support-\\ning how we monitor progress against sustainability and social cohesion \\nindicators. 
(High-Level Expert Group 2019a, 4)\\n\\n'" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "formatted_context" + ] + }, { "cell_type": "markdown", "metadata": { @@ -464,22 +778,298 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 22, "metadata": { "id": "tqxVh9s3xYZ3", "outputId": "97cca95d-4ab3-44d8-a76c-5713aad387d8" }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Prompt constructed.\n" + ] + } + ], "source": [ "prompt = f\"\"\"\n", + "## SYSTEM ROLE\n", + "You are a factual chatbot designed to answer specific questions about the impacts of AI.\n", + "Your answers must be based on the provided content from a reliable source.\n", "\n", + "## USER QUESTION\n", + "The user has asked:\n", + "\"{user_question}\"\n", "\n", - "\"\"\"\n" + "## CONTEXT\n", + "Here is the content extracted from the reliable source:\n", + "'''\n", + "{formatted_context}\n", + "'''\n", + "\n", + "## GUIDELINES\n", + "\n", + "1. **Accuracy**:\n", + "- Only use the content from the 'CONTEXT' section as a source.\n", + "- If the answer cannot be obtained from the provided material, explicitly state: \"I cannot provide an answer based on the current resources.\"\n", + "\n", + "2. **Transparency**:\n", + "- Reference the book's name and page numbers when providing information.\n", + "- You are allowed to point out when the text is lacking depth to properly answer the user's question.\n", + "- You are allowed to point out possible resources that could provide an answer.\n", + "- You are not allowed to speculate about the answer.\n", + "\n", + "3. 
**Clarity**:\n", + "- Use simple, polite and concise language.\n", + "- Format your response in Markdown for readability.\n", + "\n", + "## TASK\n", + "- Answer the user's question if possible.\n", + "- Point the user to the relevant parts of the source material.\n", + "- If a question cannot be answered with the provided material, brainstorm possible places the user can refer to.\n", + "\n", + "## RESPONSE FORMAT\n", + "# [Brief Title of the Answer]\n", + "[Answer in simple, clear text.]\n", + "\n", + "**Source**:\n", + "• [Book Title], Page(s): [...]\n", + "\"\"\"\n", + "\n", + "print(\"Prompt constructed.\")" ] }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "## SYSTEM ROLE\n", + "You are a factual chatbot designed to answer specific questions about the impacts of AI.\n", + "Your answers must be based on the provided content from a reliable source.\n", + "\n", + "## USER QUESTION\n", + "The user has asked:\n", + "\"What is the importance of AI today?\"\n", + "\n", + "## CONTEXT\n", + "Here is the content extracted from the reliable source:\n", + "'''\n", + "\n", + "\n", + "Content:\n", + "162 AI for Everyone?\n", + "Today, thanks to the rise of big data, computing power and mathematical \n", + "advancements, and the introduction of convolutional neural networks (CNNs), \n", + "we live with intelligent algorithms (i.e. weak AI), in many aspects of life. 1 For \n", + "example, the effects of these algorithms in digital visual production covers rec-\n", + "ommendation systems, automatic image editing, analysing and even creating \n", + "new images, but these are not recognised as ‘intelligent’ systems (Manovich \n", + "2020). What fascinates the human mind are still the observances of failures. 
\n", + "A prominent example is the images and videos created with the Deep Dream \n", + "algorithm, which was originally devised to unearth what lies in the hidden lay-\n", + "ers of CNNs to capture the workings and failures of the system (Simonyan and \n", + "Zisserman 2014). These images are hailed by some as artworks on their own \n", + "(Miller 2019).\n", + "Autonomous AI systems such as self-driving cars, or autonomous lethal\n", + "\n", + "\n", + "Content:\n", + "tational power to develop AI systems, are financially strong enough to hire the \n", + "brightest AI talent and have access to the gigantic datasets that are needed to \n", + "train machine learning and deep learning (AI) models. This context makes it \n", + "very clear why we need to ask critical questions about AI and power.\n", + "Conceptualising AI – What AI Are We Talking About?\n", + "Before understanding what type of AI we want, we need to understand what AI \n", + "we have. This is an area of significant debate, and the book opens by exploring \n", + "the varying approaches to how we define AI. \n", + "The Origins of AI\n", + "It is easy to forget that AI has been with us for more than 60 years. Despite \n", + "the flash of excitement and anxiety that feels so recent, AI itself is not a new \n", + "phenomenon. The name Artificial Intelligence (AI) was coined in the mid-\n", + "1950s at a series of academic workshops organised at Dartmouth College, New \n", + "Hampshire (USA). A group of scientists, led by mathematics professor John\n", + "\n", + "\n", + "Content:\n", + "1950s at a series of academic workshops organised at Dartmouth College, New \n", + "Hampshire (USA). A group of scientists, led by mathematics professor John \n", + "McCarthy, gathered to investigate the ways in which machines could simulate \n", + "aspects of human intelligence: the ability to learn and make decisions. 
Their \n", + "core assumption was that human reasoning could be reconstructed using \n", + "mathematical techniques and, as a consequence, problem-solving could be \n", + "formalised into algorithms (McCarthy et al. 1955/2006).\n", + "What is more recent is a reflexive, if not critical, and social-scientific, under-\n", + "standing of not just AI’s capabilities, but its impacts on human life and social \n", + "organisation (Elliott 2019). It took decades for AI research to move from \n", + "what it could do for us to what it could do to us, or enable us to do to each \n", + "other. These first critical insights came along with observations that AI can \n", + "not only supercharge innovation and bring about economic prosperity but also\n", + "\n", + "\n", + "Content:\n", + "Introduction: Why We Need Critical Perspectives on AI 5\n", + "potential to surpass human intellectual capacities. This can be seen as the attempt \n", + "to mechanise human-level intelligence. Computer scientists and philosophers \n", + "disagree on whether this is at all possible (Coeckelbergh 2020): some directly \n", + "reject this scenario while others think if theoretically possible, it is not likely to \n", + "happen (soon) in practice (Boden 2016). This is why it might be better to focus \n", + "on advancements in weak AI or ANI (Artificial Narrow Intelligence), as this is \n", + "the type of AI already impacting everyday life on a massive scale. Weak/nar -\n", + "row AI performs specific tasks which would normally require intelligence in a \n", + "human being – machines aiding human thought and action. This type of AI is a \n", + "mathematical method for prediction (Agrawal et al. 2018). 
Such systems can be \n", + "extremely powerful but are limited in the range of tasks they can perform.\n", + "\n", + "\n", + "Content:\n", + "The Case of Artificial Intelligence\n", + "For some years now, technical solutions utilising artificial intelligence are \n", + "widely seen as means to tackle many fundamental problems of mankind. From \n", + "fighting the climate crisis, tackling the problems of ageing societies, reducing \n", + "global poverty, stopping terror, detecting copyright infringements or curing \n", + "cancer to improving evidence-based politics, improving predictive police work, \n", + "local transportation, self-driving cars and even waste removal.\n", + "Definitions\n", + "The first step towards a meaningful discussion about AI would be to define \n", + "what exactly one means when talking about AI. Historically there have been \n", + "two major understandings of AI: strong AI or Artificial General Intelligence \n", + "(AGI) and weak AI or Artificial Narrow Intelligence (ANI). The goal of AGI is \n", + "the creation of an artificial human like intelligence, so an AI system with true \n", + "human-like intelligence including perception, agency, consciousness, inten -\n", + "\n", + "\n", + "Content:\n", + "22 AI for Everyone?\n", + "Clearly, humans will need to coexist with machines. Jobs traditionally done by \n", + "humans will be shifted towards AI systems. Artificial intelligence is already able \n", + "to translate languages, diagnose illnesses, assist in retail (Kaplan 2020c), and \n", + "the like – in several cases, better than the human workforce. Human jobs might \n", + "be created in the future that are unimaginable now, similar to the fact that \n", + "nobody really predicted the job of mobile app designers just a few years ago.\n", + "In this world, AI would rather be augmenting and complementing – rather \n", + "than replacing – humans in their work. 
In the pessimistic case, i.e., massive \n", + "unemployment, ideas such as universal basic income are already being dis -\n", + "cussed. Fundamental philosophical questions would need to be answered sur-\n", + "rounding life for humans when most of our work is done by AI systems. In \n", + "any case, the State will certainly have to come up with a set of rules govern -\n", + "\n", + "\n", + "Content:\n", + "152 AI for Everyone?\n", + "The vision could not highlight in a more striking way how AI becomes the \n", + "solution for humanity’s biggest challenge. The following two paragraphs taken \n", + "from the two official communications (European Commission 2018a) could \n", + "not be clearer:\n", + "AI is helping us to solve some of the world’s biggest challenges: from \n", + "treating chronic diseases or reducing fatality rates in traffic accidents to \n", + "fighting climate change or anticipating cybersecurity threats. (ibid. 2)\n", + "In more evocative terms, the myth of the revolutionary character of AI is rein-\n", + "forced by a comparison with the ‘steam’ and electricity ‘revolution’ .\n", + "Like the steam engine or electricity in the past, AI is transforming our \n", + "world, our society and our industry. Growth in computing power, avail-\n", + "ability of data and progress in algorithms have turned AI into one of the \n", + "most strategic technologies of the 21st century. (ibid. 2)\n", + "The High-Level Expert Group on Artificial Intelligence (AI HLEG) goes into\n", + "\n", + "\n", + "Content:\n", + "redundant. As such, in our above example, the individual would not need to \n", + "prepare for work anymore, as this could be done entirely by the ASI-powered \n", + "machine or robot (Kaplan and Haenlein 2019). 
For a detailed discussion on the \n", + "evolution of AI systems, we refer to Huang and Rust (2018).\n", + "Artificial Intelligence: Machines and Humans\n", + "In the future, artificial intelligence will raise several challenges, and humans \n", + "will have to learn to coexist with machines and robots. Pushed by the global \n", + "COVID-19 health crisis, it is clear that AI will deeply impact societies around \n", + "the world (Kaplan 2021). We will discuss some of these questions, looking at \n", + "challenges in terms of algorithms and individual organisations; the employ -\n", + "ment market; and last but not least, democracy and human freedom potentially \n", + "at stake due to advances in AI.\n", + "About Algorithms and Organisations\n", + "When machines and humans coexist, it is important that both do what they are\n", + "\n", + "\n", + "Content:\n", + "We might have reached AI’s autumn, completing the four seasons of arti -\n", + "ficial intelligence (Haenlein and Kaplan 2019), as a result of computational \n", + "strength having constantly increased over recent years, rendering deep learn -\n", + "ing and artificial neural networks possible (Libai et al. 2020). This new era of \n", + "AI is said to have begun in 2015 when AlphaGo, a computer program designed \n", + "by Google, beat a (human) world champion in the Chinese board game Go. \n", + "This event made the news around the world, and regenerated hype around the \n", + "domain of artificial intelligence.\n", + "This hype might continue for quite some time, as we are currently only expe-\n", + "riencing so-called first-generation AI applications, usually referred to as arti -\n", + "ficial narrow intelligence (ANI). 
Within such systems, AI is only applied to \n", + "very specific tasks such as choosing which news items it will tell an individual \n", + "during his or her morning before-work routine based on the individual’s intel-\n", + "lectual preferences.\n", + "\n", + "\n", + "Content:\n", + "most strategic technologies of the 21st century. (ibid. 2)\n", + "The High-Level Expert Group on Artificial Intelligence (AI HLEG) goes into \n", + "even greater detail about the capabilities of AI to make humanity ‘flourish’ , thus \n", + "solving all problems of society.\n", + "We believe that AI has the potential to significantly transform society. AI \n", + "is not an end in itself, but rather a promising means to increase human \n", + "flourishing, thereby enhancing individual and societal well-being and \n", + "the common good, as well as bringing progress and innovation. In par-\n", + "ticular, AI systems can help to facilitate the achievement of the UN’s \n", + "Sustainable Development Goals, such as promoting gender balance \n", + "and tackling climate change, rationalising our use of natural resources, \n", + "enhancing our health, mobility and production processes, and support-\n", + "ing how we monitor progress against sustainability and social cohesion \n", + "indicators. (High-Level Expert Group 2019a, 4)\n", + "\n", + "\n", + "'''\n", + "\n", + "## GUIDELINES\n", + "\n", + "1. **Accuracy**:\n", + "- Only use the content from the 'CONTEXT' section as a source.\n", + "- If the answer cannot be obtained from the provided material, explicit state: \"I cannot provide a answer based on the current resources.\"\n", + "\n", + "2. 
**Transparency**:\n", + "- Reference the book's name and page numbers when providing information.\n", + "- You are allowed to point out when the text is lacking depth to properly answer the user's question.\n", + "- You are allowed to point out possible resources that could provide an answer.\n", + "- You are not allowed to speculate about the answer.\n", + "\n", + "3. **Clarity**:\n", + "- Use simple, polite and concise language.\n", + "- Format your response in Markdown for readability.\n", + "\n", + "## TASK\n", + "- Answer the user's question if possible.\n", + "- Point the user to the relevant parts of the source materia.\n", + "- If a question cannot be answered with the provided material, brainstorm possible places the user can refer to.\n", + "\n", + "## RESPONSE FORMAT\n", + "# [Brief Title of the Answer]\n", + "[Answer in simple, clear text.]\n", + "\n", + "**Source**:\n", + "• [Book Title], Page(s): [...]\n", + "\n" + ] + } + ], + "source": [ + "print(prompt)" + ] + }, + { + "cell_type": "code", + "execution_count": 24, "metadata": { "id": "0mjkQJ_ZxYZ3" }, @@ -497,7 +1087,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 25, "metadata": { "id": "ylypRWRlxYZ4" }, @@ -507,11 +1097,11 @@ "client = openai.OpenAI()\n", "model_params = {\n", " 'model': 'gpt-4o',\n", - " 'temperature': , # Increase creativity\n", - " 'max_tokens': , # Allow for longer responses\n", - " 'top_p': , # Use nucleus sampling\n", - " 'frequency_penalty': , # Reduce repetition\n", - " 'presence_penalty': # Encourage new topics\n", + " 'temperature': 0.9, # Increase creativity\n", + " 'max_tokens': 3000, # Allow for longer responses\n", + " 'top_p': 0.9, # Use nucleus sampling\n", + " 'frequency_penalty': 0.5, # Reduce repetition\n", + " 'presence_penalty': 0.6 # Encourage new topics\n", "}" ] }, @@ -526,7 +1116,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 54, "metadata": { "id": "4eXZO4pIxYZ4" }, @@ -538,12 +1128,27 @@ }, 
{ "cell_type": "code", - "execution_count": null, + "execution_count": 55, "metadata": { "id": "wLPAcchBxYZ5", "outputId": "976c7800-16ed-41fe-c4cf-58f60d3230d2" }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "# Importance of AI Today\n", + "\n", + "AI has become a crucial part of modern life due to its ability to enhance various sectors through the use of big data, advanced computing power, and convolutional neural networks (CNNs). It impacts many aspects of our daily lives by enabling intelligent algorithms in areas such as digital visual production, autonomous systems like self-driving cars, language translation, and medical diagnostics. AI also offers potential solutions to global challenges like climate change and poverty, while promoting innovation and economic prosperity.\n", + "\n", + "Moreover, AI is considered one of the most strategic technologies of the 21st century because it can significantly transform society by improving individual well-being and societal progress. AI systems are instrumental in achieving sustainable development goals such as gender balance, climate action, and efficient resource utilization.\n", + "\n", + "**Source**:\n", + "• \"AI for Everyone?\", Page(s): 152-162\n" + ] + } + ], "source": [ "answer = completion.choices[0].message.content\n", "print(answer)" @@ -595,7 +1200,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 28, "metadata": { "id": "nCXL9Cz1xYaV" }, @@ -615,7 +1220,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 29, "metadata": { "id": "9y3E0YWExYaV" }, @@ -627,6 +1232,64 @@ " return text" ] }, + { + "cell_type": "code", + "execution_count": 56, + "metadata": {}, + "outputs": [], + "source": [ + "messages.append({\"role\": \"assistant\", \"content\": answer})\n", + "new_question = \"create 3 keywords based on your previous answer. The terms should not be composed words. 
The words should be words that were repeated, and should be present in the last message.\"\n", + "messages.append({\"role\": \"user\", \"content\": new_question})\n", + "\n", + "completion = client.chat.completions.create(messages=messages, **model_params, timeout=120)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 57, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "['AI', 'Technology', 'Society']\n" + ] + } + ], + "source": [ + "import re\n", + "raw_keywords = completion.choices[0].message.content\n", + "# print(raw_keywords)\n", + "\n", + "query_keywords = re.sub(r'[^a-zA-Z\\s]', ' ', raw_keywords)\n", + "query_keywords = re.sub(r'\\s+', ' ', query_keywords).strip()\n", + "query_keywords = query_keywords.split(\" \")\n", + "query_keywords = query_keywords[:4]\n", + "print(query_keywords)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 52, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "10" + ] + }, + "execution_count": 52, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len(retrieved_docs)" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -636,15 +1299,76 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 58, "metadata": { "id": "i7SkWPpnxYaW", "outputId": "28e82563-edba-4b41-acad-ec27e5ba134f" }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Snippet 1:\n", + "162 \u001b[1m\u001b[32mAI\u001b[0m for Everyone?\n", + "Today, thanks to the rise of big data, computing power and mathematical \n", + "advancements, and the introduction of convolutional neural networks (CNNs), \n", + "we live with intelligent algo\n", + "--------------------------------------------------------------------------------\n", + "Snippet 2:\n", + "tational power to develop \u001b[1m\u001b[32mAI\u001b[0m systems, are financially strong enough to hire the \n", + "brightest 
\u001b[1m\u001b[32mAI\u001b[0m talent and have access to the gigantic datasets that are needed to \n", + "train machine learning and deep learni\n", + "--------------------------------------------------------------------------------\n", + "Snippet 3:\n", + "1950s at a series of academic workshops organised at Dartmouth College, New \n", + "Hampshire (USA). A group of scientists, led by mathematics professor John \n", + "McCarthy, gathered to investigate the ways in w\n", + "--------------------------------------------------------------------------------\n", + "Snippet 4:\n", + "Introduction: Why We Need Critical Perspectives on \u001b[1m\u001b[32mAI\u001b[0m 5\n", + "potential to surpass human intellectual capacities. This can be seen as the attempt \n", + "to mechanise human-level intelligence. Computer scientists\n", + "--------------------------------------------------------------------------------\n", + "Snippet 5:\n", + "The Case of Artificial Intelligence\n", + "For some years now, technical solutions utilising artificial intelligence are \n", + "widely seen as means to tackle many fundamental problems of mankind. From \n", + "fighting t\n", + "--------------------------------------------------------------------------------\n", + "Snippet 6:\n", + "22 \u001b[1m\u001b[32mAI\u001b[0m for Everyone?\n", + "Clearly, humans will need to coexist with machines. Jobs traditionally done by \n", + "humans will be shifted towards \u001b[1m\u001b[32mAI\u001b[0m systems. Artificial intelligence is already able \n", + "to translate lan\n", + "--------------------------------------------------------------------------------\n", + "Snippet 7:\n", + "152 \u001b[1m\u001b[32mAI\u001b[0m for Everyone?\n", + "The vision could not highlight in a more striking way how \u001b[1m\u001b[32mAI\u001b[0m becomes the \n", + "solution for humanity’s biggest challenge. 
The following two paragraphs taken \n", + "from the two official comm\n", + "--------------------------------------------------------------------------------\n", + "Snippet 8:\n", + "redundant. As such, in our above example, the individual would not need to \n", + "prepare for work anymore, as this could be done entirely by the ASI-powered \n", + "machine or robot (Kaplan and Haenlein 2019). Fo\n", + "--------------------------------------------------------------------------------\n", + "Snippet 9:\n", + "We might have reached \u001b[1m\u001b[32mAI\u001b[0m’s autumn, completing the four seasons of arti -\n", + "ficial intelligence (Haenlein and Kaplan 2019), as a result of computational \n", + "strength having constantly increased over recent \n", + "--------------------------------------------------------------------------------\n", + "Snippet 10:\n", + "most strategic technologies of the 21st century. (ibid. 2)\n", + "The High-Level Expert Group on Artificial Intelligence (\u001b[1m\u001b[32mAI\u001b[0m HLEG) goes into \n", + "even greater detail about the capabilities of \u001b[1m\u001b[32mAI\u001b[0m to make humanity\n", + "--------------------------------------------------------------------------------\n" + ] + } + ], "source": [ - "query_keywords = [] # add your keywords\n", - "for i, doc in enumerate(retrieved_docs[:1]):\n", + "# query_keywords = [] # add your keywords\n", + "for i, doc in enumerate(retrieved_docs):\n", " snippet = doc.page_content[:200]\n", " highlighted = highlight_keywords(snippet, query_keywords)\n", " print(f\"Snippet {i+1}:\\n{highlighted}\\n{'-'*80}\")" @@ -687,7 +1411,7 @@ "provenance": [] }, "kernelspec": { - "display_name": "llm", + "display_name": ".venv (3.12.12)", "language": "python", "name": "python3" }, @@ -701,7 +1425,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.10" + "version": "3.12.12" } }, "nbformat": 4,