Merge pull request #6 from JAlvarezJarreta/bufixes
Bugfixes
JAlvarezJarreta authored Oct 19, 2023
2 parents 1ab6d3a + 88adb89 commit 58727d9
Showing 2 changed files with 3 additions and 4 deletions.
scripts/get_taxon_flat.py (3 changes: 1 addition & 2 deletions)

@@ -8,7 +8,7 @@
 pymysql.install_as_MySQLdb()
 
 
-def get_taxon_ids(url):
+def get_taxon_ids(species_url: str = "https://metazoa.ensembl.org/species.html"):
     """
     The function scrapes the metazoa taxonomy ids data
     from a fixed url: https://metazoa.ensembl.org/species.html
@@ -19,7 +19,6 @@ def get_taxon_ids(url):
     """
 
-    species_url = "https://metazoa.ensembl.org/species.html"
     response = requests.get(species_url)
     soup = BeautifulSoup(response.text, "lxml")
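
In get_taxon_flat.py, the hardcoded species page address moves from the function body into a default argument, so the old behaviour is kept while callers can now point the scraper at a different page. Below is a minimal sketch of how the updated function plausibly reads after this commit, based only on the requests/BeautifulSoup usage visible in the hunk context; the parsing that follows is not shown in the diff and is omitted here, as is the pymysql setup from the top of the file.

import requests
from bs4 import BeautifulSoup


def get_taxon_ids(species_url: str = "https://metazoa.ensembl.org/species.html"):
    """Scrape the Ensembl Metazoa species page for taxonomy IDs (sketch only)."""
    # The URL is now a parameter whose default is the previously hardcoded value,
    # so existing callers that pass nothing keep the old behaviour.
    response = requests.get(species_url)
    soup = BeautifulSoup(response.text, "lxml")
    # ... the real script continues by extracting taxon IDs from `soup` ...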
src/taxon_search/views.py (4 changes: 2 additions & 2 deletions)

@@ -35,8 +35,8 @@ def index(request):
     # call the elastic search function in search.py
     search_results = search_species(q)
 
-    name_class = [d["name_class"] for d in search_results][0]
-    rank = [d["rank"] for d in search_results][0]
+    name_class = [d["name_class"] for d in search_results][0] if search_results else ""
+    rank = [d["rank"] for d in search_results][0] if search_results else ""
 
     matched_species = set([d["species_taxon_id"] for d in search_results])
     species_names = EnsemblMetadata.objects.filter(taxonomy_id__in=matched_species)
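
In views.py, the two single-element lookups are guarded so that an empty search result no longer raises an IndexError; name_class and rank simply fall back to empty strings. A small self-contained illustration of the guarded pattern, using a hypothetical empty result list in place of the real search_species(q) call:

# Hypothetical stand-in for an empty result set from search_species(q).
search_results: list[dict] = []

# Same guard as in the commit: take the field from the first hit if there are
# any hits, otherwise fall back to an empty string.
name_class = [d["name_class"] for d in search_results][0] if search_results else ""
rank = [d["rank"] for d in search_results][0] if search_results else ""

assert name_class == "" and rank == ""  # no IndexError on an empty result

An equivalent that avoids building the intermediate list would be next((d["name_class"] for d in search_results), ""), though the commit keeps the list-comprehension form already used in the view.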
