Skip to content

Instantly share code, notes, and snippets.

@eramax
Last active December 18, 2023 18:55
Show Gist options
  • Save eramax/8533181ad841e4612041c42d154df003 to your computer and use it in GitHub Desktop.
ollama-colab.ipynb
Display the source blob
Display the rendered blob
Raw
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"provenance": [],
"gpuType": "T4",
"authorship_tag": "ABX9TyN36v4/9+7UW65X3MdevarM",
"include_colab_link": true
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"language_info": {
"name": "python"
},
"accelerator": "GPU"
},
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "view-in-github",
"colab_type": "text"
},
"source": [
"<a href=\"https://colab.research.google.com/gist/eramax/8533181ad841e4612041c42d154df003/ollama.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "KWH_unS29M1Q",
"outputId": "fc3dd376-0899-4029-8147-75ab686763b8"
},
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
" % Total % Received % Xferd Average Speed Time Time Time Current\n",
" Dload Upload Total Spent Left Speed\n",
"100 7983 0 7983 0 0 26910 0 --:--:-- --:--:-- --:--:-- 26969\n",
">>> Downloading ollama...\n",
"############################################################################################# 100.0%\n",
">>> Installing ollama to /usr/local/bin...\n",
">>> Creating ollama user...\n",
">>> Adding current user to ollama group...\n",
">>> Creating ollama systemd service...\n",
"WARNING: Unable to detect NVIDIA GPU. Install lspci or lshw to automatically detect and install NVIDIA CUDA drivers.\n",
">>> The Ollama API is now available at 0.0.0.0:11434.\n",
">>> Install complete. Run \"ollama\" from the command line.\n",
"Requirement already satisfied: aiohttp in /usr/local/lib/python3.10/dist-packages (3.9.1)\n",
"Collecting pyngrok\n",
" Downloading pyngrok-7.0.3-py3-none-any.whl (21 kB)\n",
"Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp) (23.1.0)\n",
"Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.10/dist-packages (from aiohttp) (6.0.4)\n",
"Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp) (1.9.4)\n",
"Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from aiohttp) (1.4.0)\n",
"Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.10/dist-packages (from aiohttp) (1.3.1)\n",
"Requirement already satisfied: async-timeout<5.0,>=4.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp) (4.0.3)\n",
"Requirement already satisfied: PyYAML in /usr/local/lib/python3.10/dist-packages (from pyngrok) (6.0.1)\n",
"Requirement already satisfied: idna>=2.0 in /usr/local/lib/python3.10/dist-packages (from yarl<2.0,>=1.0->aiohttp) (3.6)\n",
"Installing collected packages: pyngrok\n",
"Successfully installed pyngrok-7.0.3\n"
]
}
],
"source": [
"# Download and install Ollama system-wide via the official install script.\n",
"!curl https://ollama.ai/install.sh | sh\n",
"\n",
"# %pip (not !pip) so the packages install into this kernel's environment.\n",
"%pip install aiohttp pyngrok\n",
"\n",
"import os\n",
"import asyncio\n",
"\n",
"# Point the dynamic linker at the system NVIDIA libraries so ollama can see the GPU.\n",
"os.environ.update({'LD_LIBRARY_PATH': '/usr/lib64-nvidia'})\n",
"\n",
"async def run_process(cmd):\n",
"    \"\"\"Spawn `cmd` (an argv list of strings) and stream its stdout and\n",
"    stderr into this cell's output until the process exits.\"\"\"\n",
"    print('>>> starting', *cmd)\n",
"    # Public asyncio API (asyncio.subprocess.create_subprocess_exec is an\n",
"    # internal path); PIPE both streams so we can relay them below.\n",
"    proc = await asyncio.create_subprocess_exec(\n",
"        *cmd,\n",
"        stdout=asyncio.subprocess.PIPE,\n",
"        stderr=asyncio.subprocess.PIPE,\n",
"    )\n",
"\n",
"    async def pipe(stream):\n",
"        # Relay one stream line-by-line; errors='replace' keeps the relay\n",
"        # alive if the child emits non-UTF-8 bytes.\n",
"        async for line in stream:\n",
"            print(line.decode('utf-8', errors='replace').rstrip())\n",
"\n",
"    # Drain both streams concurrently so neither pipe buffer fills and blocks the child.\n",
"    await asyncio.gather(\n",
"        pipe(proc.stdout),\n",
"        pipe(proc.stderr),\n",
"    )\n"
]
},
{
"cell_type": "code",
"source": [
"# Launch the Ollama API server and an ngrok tunnel to it at the same time.\n",
"# Both processes run until interrupted; their logs stream into this cell.\n",
"server = run_process(['ollama', 'serve'])\n",
"tunnel = run_process(['ngrok', 'http', '--log', 'stderr', '11434'])\n",
"await asyncio.gather(server, tunnel)\n",
"\n",
"# On the local machine: export OLLAMA_HOST=https://03c1-35-240-226-254.ngrok.io/\n"
],
"metadata": {
"id": "AW_QGAbkRbdF"
},
"execution_count": null,
"outputs": []
}
]
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment