Skip to content

Commit

Permalink
* adding username to .creds
Browse files Browse the repository at this point in the history
* formatting
  • Loading branch information
mogres committed Jul 5, 2023
1 parent b5b38c9 commit e921c35
Show file tree
Hide file tree
Showing 6 changed files with 54 additions and 33 deletions.
12 changes: 5 additions & 7 deletions cellpack/autopack/DBRecipeHandler.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,9 @@ def as_dict(self):

@staticmethod
def get_gradient_reference(downloaded_data, db):
if "gradient" in downloaded_data and db.is_reference(downloaded_data["gradient"]):
if "gradient" in downloaded_data and db.is_reference(
downloaded_data["gradient"]
):
gradient_key = downloaded_data["gradient"]
downloaded_data["gradient"], _ = db.get_doc_by_ref(gradient_key)

Expand Down Expand Up @@ -151,12 +153,8 @@ def resolve_local_regions(self, local_data, recipe_data, db):
"object"
] = prep_recipe_data["objects"][obj_item["name"]]
# replace gradient reference with gradient data
obj_data = local_data["regions"][region_name][index][
"object"
]
if "gradient" in obj_data and isinstance(
obj_data["gradient"], str
):
obj_data = local_data["regions"][region_name][index]["object"]
if "gradient" in obj_data and isinstance(obj_data["gradient"], str):
local_data["regions"][region_name][index]["object"][
"gradient"
] = prep_recipe_data["gradients"][obj_data["gradient"]]
Expand Down
17 changes: 10 additions & 7 deletions cellpack/autopack/FirebaseHandler.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from firebase_admin import credentials, firestore
from cellpack.autopack.loaders.utils import read_json_file, write_json_file


class FirebaseHandler(object):
"""
Retrieve data and perform common tasks when working with firebase.
Expand All @@ -18,25 +19,27 @@ def __init__(self):
@staticmethod
def doc_to_dict(doc):
return doc.to_dict()

@staticmethod
def write_creds_path():
path = ast.literal_eval(input("provide path to firebase credentials: "))
print(path)
data = read_json_file(path)
if data is None:
raise ValueError("The path to your credentials doesn't exist")
firebase_cred = {
"firebase": data
}
write_json_file("./.creds", firebase_cred)
firebase_cred = {"firebase": data}
creds = read_json_file("./.creds")
if creds is None:
write_json_file("./.creds", firebase_cred)
else:
creds["firebase"] = data
write_json_file("./.creds", creds)
return firebase_cred


@staticmethod
def get_creds():
creds = read_json_file("./.creds")
if creds is None:
if creds is None or "firebase" not in creds:
creds = FirebaseHandler.write_creds_path()
return creds["firebase"]

Expand Down
12 changes: 12 additions & 0 deletions cellpack/autopack/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@
import re
import shutil
from os import path, environ
import pwd
from pathlib import Path
import urllib.request as urllib
from collections import OrderedDict
Expand All @@ -50,6 +51,7 @@
from cellpack.autopack.interface_objects.meta_enum import MetaEnum
from cellpack.autopack.FirebaseHandler import FirebaseHandler
from cellpack.autopack.DBRecipeHandler import DBRecipeHandler
from cellpack.autopack.loaders.utils import read_json_file, write_json_file


packageContainsVFCommands = 1
Expand Down Expand Up @@ -539,6 +541,15 @@ def clearCaches(*args):
print("problem cleaning ", cache_dir[k])


def write_username_to_creds():
    """Record the current OS account name under "username" in ./.creds.

    Merges into the existing .creds content so sibling keys (e.g. the
    cached "firebase" credentials) are preserved.
    """
    # The visible module-level imports only bring in `path`/`environ` from
    # os, so import the module itself here to make os.getuid() resolvable.
    import os

    # pwd is Unix-only; this mirrors the original implementation.
    username = pwd.getpwuid(os.getuid())[0]
    creds = read_json_file("./.creds")
    if creds is None:
        creds = {}
    # BUG FIX: the original reset creds to {} whenever "username" was
    # missing, silently discarding the cached firebase credentials.
    creds["username"] = username
    write_json_file("./.creds", creds)


# we should read a file to fill the RECIPE Dictionary
# so we can add some and write/save setup
# afdir or user_pref
Expand All @@ -547,6 +558,7 @@ def clearCaches(*args):
checkPath()
updatePathJSON()
checkRecipeAvailable()
write_username_to_creds()
log.info("path are updated ")

log.info(f"currently number recipes is {len(RECIPES)}")
Expand Down
2 changes: 1 addition & 1 deletion cellpack/autopack/loaders/migrate_v1_to_v2.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,7 +145,7 @@ def convert(old_recipe):
new_recipe["name"] = old_recipe["recipe"]["name"]
new_recipe["bounding_box"] = old_recipe["options"]["boundingBox"]
objects_dict = {}
#TODO: check if composition structure is correct
# TODO: check if composition structure is correct
composition = {"space": {"regions": {}}}
if "cytoplasme" in old_recipe:
outer_most_region_array = []
Expand Down
41 changes: 24 additions & 17 deletions cellpack/autopack/loaders/recipe_loader.py
Original file line number Diff line number Diff line change
Expand Up @@ -180,7 +180,7 @@ def _get_gradient_data(obj_data, obj_dict, grad_dict):

@staticmethod
def _is_obj(comp_or_obj):
# if the top level of a downloaded comp doesn't have the key `name`, it's an obj
# if the top level of a downloaded comp doesn't have the key `name`, it's an obj
return not comp_or_obj.get("name") and "object" in comp_or_obj

def _collect_and_sort_data(self, comp_data):
Expand Down Expand Up @@ -208,14 +208,16 @@ def _collect_and_sort_data(self, comp_data):
# RecipeLoader._remove_name_key(object_copy)
if "regions" in comp_value and comp_value["regions"] is not None:
for region_name in comp_value["regions"]:
composition[comp_name]["regions"]={}
composition[comp_name].setdefault("regions",{})[region_name]=[]
composition[comp_name]["regions"] = {}
composition[comp_name].setdefault("regions", {})[region_name] = []
for region_item in comp_value["regions"][region_name]:
if RecipeLoader._is_obj(region_item):
composition[comp_name]["regions"][region_name].append({
"object": region_item["object"].get("name"),
"count": region_item.get("count")
})
if RecipeLoader._is_obj(region_item):
composition[comp_name]["regions"][region_name].append(
{
"object": region_item["object"].get("name"),
"count": region_item.get("count"),
}
)
object_copy = copy.deepcopy(region_item["object"])
objects[object_copy["name"]] = object_copy
if "gradient" in object_copy and isinstance(
Expand All @@ -225,12 +227,15 @@ def _collect_and_sort_data(self, comp_data):
object_copy, objects, gradients
)
# RecipeLoader._remove_name_key(object_copy)
else:
composition[comp_name]["regions"][region_name].append(region_item["name"])
else:
composition[comp_name]["regions"][region_name].append(
region_item["name"]
)
return objects, gradients, composition


def _compile_recipe_from_firebase(self, db_recipe_data, obj_dict, grad_dict, comp_dict):
def _compile_recipe_from_firebase(
self, db_recipe_data, obj_dict, grad_dict, comp_dict
):
"""
Compile recipe data from firebase recipe data into a ready-to-pack structure
"""
Expand All @@ -241,17 +246,19 @@ def _compile_recipe_from_firebase(self, db_recipe_data, obj_dict, grad_dict, com
recipe_data["bounding_box"] = db_recipe_data["bounding_box"]
recipe_data["objects"] = obj_dict
if grad_dict:
recipe_data["gradients"] = [
{**v} for v in grad_dict.values()
]
recipe_data["gradients"] = [{**v} for v in grad_dict.values()]
recipe_data["composition"] = comp_dict
return recipe_data

def _read(self):
new_values, database_name = autopack.load_file(self.file_path, cache="recipes")
if database_name == "firebase":
objects, gradients, composition = self._collect_and_sort_data(new_values["composition"])
new_values = self._compile_recipe_from_firebase(new_values, objects, gradients, composition)
objects, gradients, composition = self._collect_and_sort_data(
new_values["composition"]
)
new_values = self._compile_recipe_from_firebase(
new_values, objects, gradients, composition
)
recipe_data = RecipeLoader.default_values.copy()
recipe_data = deep_merge(recipe_data, new_values)
recipe_data["format_version"] = RecipeLoader._sanitize_format_version(
Expand Down
3 changes: 2 additions & 1 deletion cellpack/autopack/loaders/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,14 +23,15 @@ def create_output_dir(out_base_folder, recipe_name, sub_dir=None):
os.makedirs(output_folder, exist_ok=True)
return output_folder


def read_json_file(path):
    """Parse and return the JSON contents of *path*, or None if it is absent."""
    json_path = Path(path)
    if not json_path.exists():
        return None
    with open(json_path, "r") as handle:
        return json.load(handle)


def write_json_file(path, data):
    """Serialize *data* as JSON to *path*, creating parent directories as needed."""
    target = Path(path)
    # mkdir is a no-op when the parent already exists (exist_ok=True)
    target.parent.mkdir(parents=True, exist_ok=True)
    with target.open("w") as out_file:
        json.dump(data, out_file)

0 comments on commit e921c35

Please sign in to comment.