Skip to content

Commit

Permalink
Merge pull request #143 from catsmanac/Dev
Browse files Browse the repository at this point in the history
Add logging during config and allow more time for slow envoy, make timeouts configurable
  • Loading branch information
catsmanac authored Aug 22, 2023
2 parents 4d9cc31 + f236f3f commit a899028
Show file tree
Hide file tree
Showing 7 changed files with 160 additions and 34 deletions.
9 changes: 8 additions & 1 deletion custom_components/enphase_envoy_custom/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,10 @@
SCAN_INTERVAL = timedelta(seconds=60)
STORAGE_KEY = "envoy"
STORAGE_VERSION = 1
FETCH_RETRIES = 1
FETCH_TIMEOUT_SECONDS = 30
FETCH_HOLDOFF_SECONDS = 0
COLLECTION_TIMEOUT_SECONDS = 55

_LOGGER = logging.getLogger(__name__)

Expand All @@ -47,13 +51,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
enlighten_serial_num=config[CONF_SERIAL],
https_flag='s' if config.get(CONF_USE_ENLIGHTEN, False) else '',
store=store,
fetch_retries=options.get("data_fetch_retry_count", FETCH_RETRIES),
fetch_timeout_seconds=options.get("data_fetch_timeout_seconds", FETCH_TIMEOUT_SECONDS),
fetch_holdoff_seconds=options.get("data_fetch_holdoff_seconds", FETCH_HOLDOFF_SECONDS),
)
await envoy_reader._sync_store()

async def async_update_data():
"""Fetch data from API endpoint."""
data = {}
async with async_timeout.timeout(30):
async with async_timeout.timeout(options.get("data_collection_timeout_seconds", COLLECTION_TIMEOUT_SECONDS)):
try:
await envoy_reader.getData()
except httpx.HTTPStatusError as err:
Expand Down
44 changes: 38 additions & 6 deletions custom_components/enphase_envoy_custom/config_flow.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,15 +37,21 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> EnvoyRead
# async_client=get_async_client(hass),
use_enlighten_owner_token=data.get(CONF_USE_ENLIGHTEN, False),
enlighten_serial_num=data[CONF_SERIAL],
https_flag='s' if data.get(CONF_USE_ENLIGHTEN,False) else ''
https_flag='s' if data.get(CONF_USE_ENLIGHTEN,False) else '',
fetch_timeout_seconds=60
)

try:
await envoy_reader.getData()
except httpx.HTTPStatusError as err:
_LOGGER.warning("Validate input, getdata returned HTTPStatusError: %s",err)
raise InvalidAuth from err
except (RuntimeError, httpx.HTTPError) as err:
except (httpx.HTTPError) as err:
_LOGGER.warning("Validate input, getdata returned HTTPError: %s",err)
raise CannotConnect from err
except (RuntimeError) as err:
_LOGGER.warning("Validate input, getdata returned RuntimeError: %s",err)
raise

return envoy_reader

Expand Down Expand Up @@ -79,7 +85,7 @@ def _async_generate_schema(self):
else:
schema[vol.Required(CONF_HOST)] = str

schema[vol.Optional(CONF_USERNAME, default=self.username or "envoy")] = str
schema[vol.Optional(CONF_USERNAME, default=self.username)] = str
schema[vol.Optional(CONF_PASSWORD, default="")] = str
schema[vol.Optional(CONF_SERIAL, default=self.unique_id)] = str
schema[vol.Optional(CONF_USE_ENLIGHTEN)] = bool
Expand Down Expand Up @@ -163,12 +169,14 @@ async def async_step_user(
return self.async_abort(reason="already_configured")
try:
envoy_reader = await validate_input(self.hass, user_input)
except CannotConnect:
except RuntimeError as rerr:
errors["base"] = "invalid_auth"
except CannotConnect as cerr:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
except Exception as exc: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception in validate input %s",exc)
errors["base"] = "unknown"
else:
data = user_input.copy()
Expand Down Expand Up @@ -231,6 +239,30 @@ async def async_step_user(self, user_input=None):
"data_interval", DEFAULT_SCAN_INTERVAL
),
): vol.All(vol.Coerce(int), vol.Range(min=5)),
vol.Optional(
"data_fetch_timeout_seconds",
default=self.config_entry.options.get(
"data_fetch_timeout_seconds", 30
),
): vol.All(vol.Coerce(int), vol.Range(min=5)),
vol.Optional(
"data_fetch_retry_count",
default=self.config_entry.options.get(
"data_fetch_retry_count", 1
),
): vol.All(vol.Coerce(int), vol.Range(min=1)),
vol.Optional(
"data_fetch_holdoff_seconds",
default=self.config_entry.options.get(
"data_fetch_holdoff_seconds", 0
),
): vol.All(vol.Coerce(int), vol.Range(min=0)),
vol.Optional(
"data_collection_timeout_seconds",
default=self.config_entry.options.get(
"data_collection_timeout_seconds", 55
),
): vol.All(vol.Coerce(int), vol.Range(min=30)),
}
return self.async_show_form(step_id="user", data_schema=vol.Schema(schema))

Expand Down
119 changes: 98 additions & 21 deletions custom_components/enphase_envoy_custom/envoy_reader.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@
import xmltodict
from envoy_utils.envoy_utils import EnvoyUtils
from homeassistant.util.network import is_ipv6_address
import xmltodict

#
# Legacy parser is only used on ancient firmwares
Expand Down Expand Up @@ -102,6 +101,9 @@ def __init__( # pylint: disable=too-many-arguments
token_refresh_buffer_seconds=0,
store=None,
info_refresh_buffer_seconds=3600,
fetch_timeout_seconds=30,
fetch_holdoff_seconds=0,
fetch_retries=1,
):
"""Init the EnvoyReader."""
self.host = host.lower()
Expand Down Expand Up @@ -138,6 +140,9 @@ def __init__( # pylint: disable=too-many-arguments
self._store = store
self._store_data = {}
self._store_update_pending = False
self._fetch_timeout_seconds = fetch_timeout_seconds
self._fetch_holdoff_seconds = fetch_holdoff_seconds
self._fetch_retries = max(fetch_retries,1)

@property
def _token(self):
Expand Down Expand Up @@ -230,50 +235,78 @@ async def _update_endpoint(self, attr, url):

async def _async_fetch_with_retry(self, url, **kwargs):
"""Retry 3 times to fetch the url if there is a transport error."""
for attempt in range(3):
header = " <Blank Authorization Header> "
for attempt in range(self._fetch_retries + 1):
header = " <Blank Header> "
if self._authorization_header:
header = " <Authorization header with Token hidden> "
header = " <Token hidden> "
_LOGGER.debug(
"HTTP GET Attempt #%s: %s: use owner token: %s: Header:%s",
"HTTP GET Attempt #%s of %s: %s: use token: %s: Header:%s Timeout: %s Holdoff: %s",
attempt + 1,
self._fetch_retries + 1,
url,
self.use_enlighten_owner_token,
header,
self._fetch_timeout_seconds,
self._fetch_holdoff_seconds,
)
try:
async with self.async_client as client:
async with self.async_client as client:
try:
getstart = time.time()
resp = await client.get(
url, headers=self._authorization_header, timeout=30, **kwargs
url, headers=self._authorization_header, timeout=self._fetch_timeout_seconds, **kwargs
)
if resp.status_code == 401 and attempt < 2:
getend = time.time()
if resp.status_code == 401 and attempt < self._fetch_retries:
if self.use_enlighten_owner_token:
_LOGGER.debug(
"Received 401 from Envoy; refreshing cookies, attempt %s of 2:",
attempt+1
"Received 401 from Envoy; refreshing cookies, in attempt %s of %s:",
attempt+1,
self._fetch_retries + 1
)
could_refresh_cookies = await self._refresh_token_cookies()
if not could_refresh_cookies:
_LOGGER.debug(
"cookie refresh failed, getting token, attempt %s of 2:",
attempt+1
"cookie refresh failed, getting token, in attempt %s of %s:",
attempt+1,
self._fetch_retries + 1
)
await self._getEnphaseToken()
continue
# don't try token and cookies refresh for legacy envoy
else:
_LOGGER.debug(
"Received 401 from Envoy; retrying, attempt %s of 2",
attempt+1
"Received 401 from Envoy; retrying, attempt %s of %s",
attempt+1,
self._fetch_retries + 1
)
continue
_LOGGER.debug("Fetched (%s) from %s: %s: %s", attempt + 1, url, resp, resp.text)
_LOGGER.debug("Fetched (%s of %s) in %s sec from %s: %s: %s",
attempt + 1,
self._fetch_retries + 1,
round(getend - getstart,1),
url,
resp,
resp.text
)
if resp.status_code == 404:
return None
return resp
except httpx.TransportError:
if attempt == 2:
raise

except httpx.TimeoutException as exc:
if attempt == self._fetch_retries:
_LOGGER.warning("HTTP Timeout in fetch_with_retry, raising: %s",exc)
raise
# Sleep a bit and try once more
_LOGGER.warning("HTTP Timeout in fetch_with_retry, waiting %s sec: %s",self._fetch_holdoff_seconds,exc)
await asyncio.sleep(self._fetch_holdoff_seconds)
except Exception as exc:
if attempt == self._fetch_retries:
_LOGGER.warning("Error in fetch_with_retry, raising: %s",exc)
raise
# Sleep a bit and try once more
_LOGGER.warning("Error in fetch_with_retry, waiting %s sec: %s",self._fetch_holdoff_seconds,exc)
await asyncio.sleep(self._fetch_holdoff_seconds)


async def _async_post(self, url, data=None, cookies=None, **kwargs):
_LOGGER.debug("HTTP POST Attempt: %s", url)
Expand All @@ -299,7 +332,7 @@ async def _fetch_owner_token_json(self) :
}
resp = await client.post(ENLIGHTEN_AUTH_URL, data=payload_login, timeout=30)
if resp.status_code >= 400:
raise RuntimeError("Could not Authenticate via Enlighten")
raise RuntimeError(f"Could not Authenticate with Enlighten, status: {resp.status_code}, {resp}")

# now that we're in a logged in session, we can request the 1 year owner token via enlighten
login_data = resp.json()
Expand All @@ -312,7 +345,7 @@ async def _fetch_owner_token_json(self) :
ENLIGHTEN_TOKEN_URL, json=payload_token, timeout=30
)
if resp.status_code != 200:
raise RuntimeError("Could not get enlighten token")
raise RuntimeError(f"Could not get enlighten token, status: {resp.status_code}, {resp}")
return resp.text

async def _getEnphaseToken(self):
Expand Down Expand Up @@ -891,6 +924,50 @@ async def envoy_info(self):
device_data["Using-UseEnligthen"] = self.use_enlighten_owner_token
device_data["Using-InfoUpdateInterval"] = self.info_refresh_buffer_seconds
device_data["Using-hasgridstatus"] = self.has_grid_status
device_data["Using-FetchRetryCount"] = self._fetch_retries
device_data["Using-FetchTimeOut"] = self._fetch_timeout_seconds
device_data["Using-FetchHoldoff"] = self._fetch_holdoff_seconds

if self.endpoint_production_json_results:
device_data[
"Endpoint-production_json"
] = self.endpoint_production_json_results.text
else:
device_data[
"Endpoint-production_json"
] = self.endpoint_production_json_results
if self.endpoint_production_v1_results:
device_data[
"Endpoint-production_v1"
] = self.endpoint_production_v1_results.text
else:
device_data["Endpoint-production_v1"] = self.endpoint_production_v1_results
if self.endpoint_production_results:
device_data["Endpoint-production"] = self.endpoint_production_results.text
else:
device_data["Endpoint-production"] = self.endpoint_production_results
if self.endpoint_production_inverters:
device_data[
"Endpoint-production_inverters"
] = self.endpoint_production_inverters.text
else:
device_data[
"Endpoint-production_inverters"
] = self.endpoint_production_inverters
if self.endpoint_ensemble_json_results:
device_data[
"Endpoint-ensemble_json"
] = self.endpoint_ensemble_json_results.text
else:
device_data["Endpoint-ensemble_json"] = self.endpoint_ensemble_json_results
if self.endpoint_home_json_results:
device_data["Endpoint-home"] = self.endpoint_home_json_results.text
else:
device_data["Endpoint-home"] = self.endpoint_home_json_results
if self.endpoint_info_results:
device_data["Endpoint-info"] = self.endpoint_info_results.text
else:
device_data["Endpoint-info"] = self.endpoint_info_results

return device_data

Expand Down
2 changes: 1 addition & 1 deletion custom_components/enphase_envoy_custom/manifest.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"domain": "enphase_envoy",
"name": "Enphase Envoy (DEV)",
"name": "Enphase Envoy (DEV-TEST)",
"documentation": "https://github.com/briancmpbll/home_assistant_custom_envoy#readme",
"requirements": ["envoy-utils"],
"codeowners": ["@briancmpbll"],
Expand Down
9 changes: 7 additions & 2 deletions custom_components/enphase_envoy_custom/strings.json
Original file line number Diff line number Diff line change
Expand Up @@ -28,10 +28,15 @@
"user": {
"title": "Envoy options",
"data": {
"data_interval": "Time between entity updates [s]"
"data_interval": "Time between entity updates [s].",
"data_fetch_timeout_seconds": "Timeout for getting single Envoy data page [s], minimum 5.",
"data_fetch_retry_count": "How many retries in getting single Envoy data page, minimum 1.",
"data_fetch_holdoff_seconds": "Time between 2 retries to get single Envoy data page[s], minimum 0.",
"data_collection_timeout_seconds": "Overall Timeout on getting all Envoy data pages[s], minimum 30."
},
"data_description": {
"data_interval": "Time between data updates, minimum 5 sec. After changes reload the envoy"
"data_interval": "Time between data updates, minimum 5 sec. After any change here or below reload the envoy.",
"data_collection_timeout_seconds": "If overall data collection takes more than this time it will be cancelled. Account for retries."
}
}
}
Expand Down
9 changes: 7 additions & 2 deletions custom_components/enphase_envoy_custom/translations/en.json
Original file line number Diff line number Diff line change
Expand Up @@ -28,10 +28,15 @@
"user": {
"title": "Envoy options",
"data": {
"data_interval": "Time between entity updates [s]"
"data_interval": "Time between entity updates [s].",
"data_fetch_timeout_seconds": "Timeout for getting single Envoy data page [s], minimum 5.",
"data_fetch_retry_count": "How many retries in getting single Envoy data page, minimum 1.",
"data_fetch_holdoff_seconds": "Time between 2 retries to get single Envoy data page[s], minimum 0.",
"data_collection_timeout_seconds": "Overall Timeout on getting all Envoy data pages[s], minimum 30."
},
"data_description": {
"data_interval": "Time between data updates, minimum 5 sec. After changes reload the envoy"
"data_interval": "Time between data updates, minimum 5 sec. After any change here or below reload the envoy.",
"data_collection_timeout_seconds": "If overall data collection takes more than this time it will be cancelled. Account for retries."
}
}
}
Expand Down
2 changes: 1 addition & 1 deletion hacs.json
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
{
"name": "Enphase Envoy (DEV)",
"name": "Enphase Envoy (DEV-TEST)",
"render_readme": true,
"content_in_root": false
}

0 comments on commit a899028

Please sign in to comment.