diff --git a/experimental/ee_agent.py b/experimental/ee_agent.py index 65e0f31c4..9ed096d17 100755 --- a/experimental/ee_agent.py +++ b/experimental/ee_agent.py @@ -145,7 +145,7 @@ class Gemini(LLM): def __init__(self): self._text_model = genai.GenerativeModel('gemini-1.5-pro-latest') - self._image_model = genai.GenerativeModel('gemini-pro-vision') + self._image_model = genai.GenerativeModel('gemini-1.5-pro-latest') self._chat_proxy = self._text_model.start_chat(history=[]) def chat(self, question: str, temperature: Optional[float] = None) -> str: diff --git a/experimental/ee_genie.ipynb b/experimental/ee_genie.ipynb index 79e2ed3f1..6e067abd0 100644 --- a/experimental/ee_genie.ipynb +++ b/experimental/ee_genie.ipynb @@ -3,6 +3,10 @@ { "cell_type": "markdown", "source": [ + "**NOTE**: a more up-to-date version of this notebook\n", + "can now be found in [geemap](https://github.com/gee-community/geemap/blob/master/docs/notebooks/149_gemini.ipynb).\n", + "\n", + "\n", "EE Genie as an interactive Earth Engine GenAI assistant that works\n", "with geemap in Colab and can retrieve and analyze images.\n", "\n", @@ -17,7 +21,7 @@ "\n", "Some other queries that work most of the time are commented out in the code where `command_input` is defined.\n", "\n", - "The agent fetches the same tiles that are loaded on geemap (you will see them flashing in the upper left-hand corner when this happens), then stitches them together into one large image and sends it to a vision model for textual description.\n", + "The agent fetches the same tiles that are loaded on geemap (you will see them flashing in the upper left-hand corner when this happens), then stitches them together into one large image and sends it to a model for textual description.\n", "\n", "**Safety**\n", "\n", @@ -124,7 +128,7 @@ "analysis_model = None\n", "map_dirty = False\n", "\n", - "image_model = genai.GenerativeModel('gemini-pro-vision')" + "image_model = genai.GenerativeModel('gemini-1.5-pro-latest')" ], 
"metadata": { "colab": { @@ -196,7 +200,7 @@ " #value='a sea port',\n", " #value='flood consequences',\n", " #value='show an interesting modis composite with the relevant visualization and analyze it over Costa Rica',\n", - " description='🙂',\n", + " description='❓',\n", " layout=widgets.Layout(width='100%', height='50px')\n", ")\n", "\n", @@ -905,7 +909,7 @@ " 'Previous attempt returned a RECITATION error. '\n", " 'Rephrase the answer to avoid it.')\n", " with chat_output:\n", - " command_input.description = '😡'\n", + " command_input.description = '🆁'\n", " time.sleep(1)\n", " with chat_output:\n", " command_input.description = '🤔'\n", @@ -953,7 +957,7 @@ " 'Previous attempt returned a RECITATION error. '\n", " 'Rephrase the answer to avoid it.')\n", " with chat_output:\n", - " command_input.description = '😡'\n", + " command_input.description = '🆁'\n", " time.sleep(1)\n", " with chat_output:\n", " command_input.description = '🤔'\n", @@ -1048,7 +1052,7 @@ "def on_submit(widget):\n", " global map_dirty\n", " map_dirty = False\n", - " command_input.description = '🙂'\n", + " command_input.description = '❓'\n", " command = widget.value\n", " if not command:\n", " command = 'go on'\n", @@ -1064,7 +1068,7 @@ " if map_dirty:\n", " command_input.description = '🙏'\n", " else:\n", - " command_input.description = '🙂'\n", + " command_input.description = '❓'\n", " set_cursor_default()\n", " response = response.strip()\n", " if not response:\n", @@ -1145,11 +1149,11 @@ "\n", "# Display the layout\n", "display(ui)\n", - "print('🙂 = waiting for user input')\n", + "print('❓ = waiting for user input')\n", "print('🙏 = waiting for user to hit enter after calling set_center()')\n", "print('🤔 = thinking')\n", "print('💤 = sleeping due to retries')\n", - "print('😡 = Gemini recitation error')" + "print('🆁 = Gemini recitation error')" ], "metadata": { "id": "5YUIQVyJ8VJc" @@ -1170,4 +1174,4 @@ }, "nbformat": 4, "nbformat_minor": 0 -} \ No newline at end of file +}