diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 0fc9bcf16cf..922f0b5df42 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.9.0-beta.3 +current_version = 0.9.1-beta.2 tag = False tag_name = {new_version} commit = True diff --git a/.bumpversion_stable.cfg b/.bumpversion_stable.cfg index 4af9bb78db5..d529a21d6f2 100644 --- a/.bumpversion_stable.cfg +++ b/.bumpversion_stable.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.8.8 +current_version = 0.9.0 tag = False tag_name = {new_version} commit = True diff --git a/.github/workflows/cd-feature-branch.yml b/.github/workflows/cd-feature-branch.yml index 40fc583caad..9391998930f 100644 --- a/.github/workflows/cd-feature-branch.yml +++ b/.github/workflows/cd-feature-branch.yml @@ -346,8 +346,8 @@ jobs: - name: Copy helm repo files from Syft Repo run: | - cp packages/grid/helm/repo/index.yaml ghpages/helm/ cp packages/grid/helm/repo/syft-${{ needs.merge-docker-images.outputs.server_version }}.tgz ghpages/helm/ + cd ghpages/helm && helm repo index . --url https://openmined.github.io/PySyft/helm - name: Commit changes to gh-pages uses: EndBug/add-and-commit@v9 diff --git a/.github/workflows/cd-syft-dev.yml b/.github/workflows/cd-syft-dev.yml index 0b9ce3fe27d..04b03817b35 100644 --- a/.github/workflows/cd-syft-dev.yml +++ b/.github/workflows/cd-syft-dev.yml @@ -104,7 +104,7 @@ jobs: tags: | ${{ secrets.ACR_SERVER }}/openmined/syft-backend:dev ${{ secrets.ACR_SERVER }}/openmined/syft-backend:dev-${{ github.sha }} - ${{ secrets.ACR_SERVER }}/openmined/syft-backend:${{ steps.syft.outputs.SERVER_VERSION }} + ${{ secrets.ACR_SERVER }}/openmined/syft-backend:${{ steps.server.outputs.SERVER_VERSION }} - name: Build and push `syft-frontend` image to registry uses: docker/build-push-action@v6 @@ -115,7 +115,7 @@ jobs: tags: | ${{ secrets.ACR_SERVER }}/openmined/syft-frontend:dev ${{ secrets.ACR_SERVER }}/openmined/syft-frontend:dev-${{ github.sha }} - ${{ secrets.ACR_SERVER }}/openmined/syft-frontend:${{ steps.syft.outputs.SERVER_VERSION }} + ${{ secrets.ACR_SERVER }}/openmined/syft-frontend:${{ steps.server.outputs.SERVER_VERSION }} target: syft-ui-development - name: Build and push `syft-seaweedfs` image to registry @@ -127,7 +127,7 @@ jobs: tags: | ${{ secrets.ACR_SERVER }}/openmined/syft-seaweedfs:dev ${{ secrets.ACR_SERVER }}/openmined/syft-seaweedfs:dev-${{ github.sha }} - ${{ secrets.ACR_SERVER }}/openmined/syft-seaweedfs:${{ steps.syft.outputs.SERVER_VERSION }} + ${{ secrets.ACR_SERVER }}/openmined/syft-seaweedfs:${{ steps.server.outputs.SERVER_VERSION }} - name: Build and push `syft-enclave-attestation` image to registry uses: docker/build-push-action@v6 @@ -138,7 +138,7 @@ jobs: tags: | ${{ secrets.ACR_SERVER }}/openmined/syft-enclave-attestation:dev ${{ secrets.ACR_SERVER }}/openmined/syft-enclave-attestation:dev-${{ github.sha }} - ${{ secrets.ACR_SERVER }}/openmined/syft-enclave-attestation:${{ steps.syft.outputs.SERVER_VERSION }} + ${{ secrets.ACR_SERVER }}/openmined/syft-enclave-attestation:${{ steps.server.outputs.SERVER_VERSION }} - name: Build Helm Chart & Copy to infra if: github.ref == 'refs/heads/dev' || github.event.inputs.deploy-helm == 'true' diff --git a/.github/workflows/cd-syft.yml b/.github/workflows/cd-syft.yml index a186bd95061..977c51f30bb 100644 --- a/.github/workflows/cd-syft.yml +++ b/.github/workflows/cd-syft.yml @@ -557,8 +557,8 @@ jobs: - name: Copy helm repo files from Syft Repo run: | - cp packages/grid/helm/repo/index.yaml ghpages/helm/ cp packages/grid/helm/repo/syft-${{ steps.release_checks.outputs.syft_version }}.tgz ghpages/helm/ + cd ghpages/helm && helm repo index . --url https://openmined.github.io/PySyft/helm - name: Commit changes to gh-pages uses: EndBug/add-and-commit@v9 diff --git a/.github/workflows/pr-tests-stack.yml b/.github/workflows/pr-tests-stack.yml index 44111145e99..789c1a28721 100644 --- a/.github/workflows/pr-tests-stack.yml +++ b/.github/workflows/pr-tests-stack.yml @@ -59,10 +59,10 @@ jobs: - name: Run syft backend base image building test if: steps.changes.outputs.stack == 'true' timeout-minutes: 60 - # run: | - # echo "Skipping pr image test" run: | - tox -e backend.test.basecpu + echo "Skipping pr image test" + # run: | + # tox -e backend.test.basecpu pr-tests-syft-integration: strategy: diff --git a/README.md b/README.md index 78d48b156f4..dfb7218673b 100644 --- a/README.md +++ b/README.md @@ -1,16 +1,30 @@ -
+


- - Syft Logo + + Syft Logo -Perform data science on `data` that remains in `someone else's` server +

Data Science on data you are not allowed to see

+ +PySyft enables a new way to do data science, where you can use non-public information, without seeing nor obtaining a copy of the data itself. All you need is to connect to a Datasite! + +Datasites are like websites, but for data. Designed with the principles of structured transparency, they enable data owners to control how their data is protected and data scientists to use data without obtaining a copy. + +PySyft supports any statistical analysis or machine learning, offering support for directly running Python code - even using third-party Python libraries. + +

Supported on:

+ +✅ Linux +✅ macOS +✅ Windows +✅ Docker +✅ Kubernetes # Quickstart -✅ `Linux` ✅ `macOS` ✅ `Windows` ✅ `Docker` ✅ `Kubernetes` +Try out your first query against a live demo Datasite! ## Install Client @@ -18,34 +32,46 @@ Perform data science on `data` that remains in `someone else's` server $ pip install -U syft[data_science] ``` +More instructions are available here. + ## Launch Server +Launch a development server directly in your Jupyter Notebook: + ```python -# from Jupyter / Python import syft as sy -sy.requires(">=0.8.8,<0.8.9") + +sy.requires(">=0.9,<0.9.1") + server = sy.orchestra.launch( name="my-datasite", port=8080, create_producer=True, n_consumers=1, - dev_mode=True, + dev_mode=False, reset=True, # resets database ) ``` +or from the command line: + ```bash -# or from the command line $ syft launch --name=my-datasite --port=8080 --reset=True Starting syft-datasite server on 0.0.0.0:8080 ``` +Datasite servers can be deployed as a single container using Docker or directly in Kubernetes. Check out our deployment guide. + ## Launch Client +Main way to use a Datasite is via our Syft client, in a Jupyter Notebook. Check out our PySyft client guide: + ```python import syft as sy -sy.requires(">=0.8.8,<0.8.9") + +sy.requires(">=0.9,<0.9.1") + datasite_client = sy.login( port=8080, email="info@openmined.org", @@ -53,321 +79,98 @@ datasite_client = sy.login( ) ``` -## PySyft in 10 minutes +## PySyft - Getting started 📝 -📝 API Example Notebooks +Learn about PySyft via our getting started guide: -- 00-load-data.ipynb -- 01-submit-code.ipynb -- 02-review-code-and-approve.ipynb -- 03-data-scientist-download-result.ipynb -- 04-pytorch-example.ipynb -- 05-custom-policy.ipynb -- 06-multiple-code-requests.ipynb -- 07-datasite-register-control-flow.ipynb -- 08-code-version.ipynb -- 09-blob-storage.ipynb -- 10-container-images.ipynb -- 11-container-images-k8s.ipynb -- 12-custom-api-endpoint.ipynb +- PySyft from the ground up +- Part 1: Datasets & Assets +- Part 2: Client and Datasite Access +- Part 3: Propose the research study +- Part 4: Review Code Requests +- Part 5: Retrieving Results -## Deploy Kubernetes Helm Chart +# PySyft In-depth -#### 0. Deploy Kubernetes +📚 Check out our docs website. -Required resources: 1 CPU and 4GB RAM. However, you will need some special instructions to deploy, please consult [these instructions](https://github.com/OpenMined/PySyft/blob/dev/notebooks/tutorials/deployments/03-deploy-k8s-k3d.ipynb) or look at the resource constraint testing [here](https://github.com/OpenMined/PySyft/pull/8828#issue-2300774645). -Recommended resources: 8+ Cores and 16GB RAM +Quick PySyft components links: -If you're using Docker Desktop to deploy your Kubernetes, you may need to go into Settings > Resources and increase CPUs and Memory. +- DataSite Server -**Note**: Assuming we have a Kubernetes cluster already setup. +- Syft Client -#### 1. Add and update Helm repo for Syft +- Datasets API (`.datasets`) -```sh -helm repo add openmined https://openmined.github.io/PySyft/helm -helm repo update openmined -``` +- Users API (`.users`) -#### 2. Search for available Syft versions +- Projects API (`.projects`) -```sh -helm search repo openmined/syft --versions --devel -``` - -#### 3. Set your preferred Syft Chart version - -```sh -SYFT_VERSION="" -``` - -#### 4. Provisioning Helm Charts - -```sh -helm install my-datasite openmined/syft --version $SYFT_VERSION --namespace syft --create-namespace --set ingress.className="traefik" -``` +- Request API (`.requests`) -### Ingress Controllers +- Code API (`.code`) -For Azure AKS +- Syft Policies API (`.policy`) -```sh -helm install ... --set ingress.className="azure-application-gateway" -``` - -For AWS EKS +- Settings API (`.settings`) -```sh -helm install ... --set ingress.className="alb" -``` +- Notifications API (`.notifications`) -For Google GKE we need the [`gce` annotation](https://cloud.google.com/kubernetes-engine/docs/how-to/load-balance-ingress#create-ingress). +- Sync API (`.sync`) -```sh -helm install ... --set ingress.class="gce" -``` +## Why use PySyft? -## Note: +In a variety of domains across society, data owners have **valid concerns about the risks associated with sharing their data**, such as legal risks, privacy invasion (_misuing the data_), or intellectual property (_copying and redistributing it_). -🚨 Our old deployment tool `HAGrid` has been `deprecated`. For the updated deployment options kindly refer to: +Datasites enable data scientists to **answer questions** without even seeing or acquiring a copy of the data, **within the data owners's definition of acceptable use**. We call this process Remote Data Science. -- 📚 Deployments +This means that the **current risks** of sharing information with someone will **no longer prevent** the vast benefits such as innovation, insights and scientific discovery. With each Datasite, data owners are able to enable `1000x more accesible data` in each scientific field and lead, together with data scientists, breakthrough innovation. -## Docs and Support +Learn more about our work on our website. -- 📚 Docs -- `#support` on Slack +## Support -# Install Notes +For questions about PySyft, reach out via `#support` on Slack. -- PySyft 0.8.6 Requires: 🐍 `python 3.10 - 3.12` - Run: `pip install -U syft` -- Syft Server Requires: 🐳 `docker` or ☸️ `kubernetes` +## Syft Versions -# Versions +:exclamation: PySyft and Syft Server must use the same `version`. -`0.9.0` (Beta) - `dev` branch 👈🏽 API - Coming soon... -`0.8.8` (Stable) - API +**Latest Stable** -Deprecated: +- `0.9.0` (Stable) - Docs +- Install PySyft (Stable): `pip install -U syft` -- `0.8.7` - API -- `0.8.6` - API -- `0.8.5-post.2` - API -- `0.8.4` - API -- `0.8.3` - API -- `0.8.2` - API -- `0.8.1` - API -- `0.8.0` - API -- `0.7.0` - Course 3 Updated -- `0.6.0` - Course 3 -- `0.5.1` - Course 2 + M1 Hotfix -- `0.2.0` - `0.5.0` +**Latest Beta** -PySyft and Syft Server use the same `version` and its best to match them up where possible. We release weekly betas which can be used in each context: +- `0.9.1` (Beta) - `dev` branch 👈🏽 +- Install PySyft (Beta): `pip install -U syft --pre` -PySyft (Stable): `pip install -U syft` - -PySyft (Beta): `pip install -U syft --pre` - -# What is Syft? - - - - Syft - - -`Syft` is OpenMined's `open source` stack that provides `secure` and `private` Data Science in Python. Syft decouples `private data` from model training, using techniques like [Federated Learning](https://ai.googleblog.com/2017/04/federated-learning-collaborative.html), [Differential Privacy](https://en.wikipedia.org/wiki/Differential_privacy), and [Encrypted Computation](https://en.wikipedia.org/wiki/Homomorphic_encryption). This is done with a `numpy`-like interface and integration with `Deep Learning` frameworks, so that you as a `Data Scientist` can maintain your current workflow while using these new `privacy-enhancing techniques`. - -### Why should I use Syft? - -`Syft` allows a `Data Scientist` to ask `questions` about a `dataset` and, within `privacy limits` set by the `data owner`, get `answers` to those `questions`, all without obtaining a `copy` of the data itself. We call this process `Remote Data Science`. It means in a wide variety of `datasites` across society, the current `risks` of sharing information (`copying` data) with someone such as, privacy invasion, IP theft and blackmail will no longer prevent the vast `benefits` such as innovation, insights and scientific discovery which secure access will provide. - -No more cold calls to get `access` to a dataset. No more weeks of `wait times` to get a `result` on your `query`. It also means `1000x more data` in every datasite. PySyft opens the doors to a streamlined Data Scientist `workflow`, all with the individual's `privacy` at its heart. - - - -# Terminology - - - - - - - - - - - - - - - - - - - - -
- -

👨🏻‍💼 Data Owners

-
- -

👩🏽‍🔬 Data Scientists

-
- - -Provide `datasets` which they would like to make available for `study` by an `outside party` they may or may not `fully trust` has good intentions. - - - - -Are end `users` who desire to perform `computations` or `answer` a specific `question` using one or more data owners' `datasets`. - -
- -

🏰 Datasite Server

-
- -

🔗 Gateway Server

-
- - -Manages the `remote study` of the data by a `Data Scientist` and allows the `Data Owner` to manage the `data` and control the `privacy guarantees` of the subjects under study. It also acts as a `gatekeeper` for the `Data Scientist's` access to the data to compute and experiment with the results. - - - - -Provides services to a group of `Data Owners` and `Data Scientists`, such as dataset `search` and bulk `project approval` (legal / technical) to participate in a project. A gateway server acts as a bridge between it's members (`Datasites`) and their subscribers (`Data Scientists`) and can provide access to a collection of `datasites` at once.
+Find more about previous releases here. # Community - - - - - - -
- -
- - - - - - - - -
-
- - - - -
- - - - - +Supported by the OpenMined Foundation, the OpenMined Community is an online network of over 17,000 technologists, researchers, and industry professionals keen to _unlock 1000x more data in every scientific field and industry_. - - -
-
+ # Courses
- +
- +
- +
@@ -377,13 +180,20 @@ Provides services to a group of `Data Owners` and `Data Scientists`, such as dat # Contributors -OpenMined and Syft appreciates all contributors, if you would like to fix a bug or suggest a new feature, please see our [guidelines](https://openmined.github.io/PySyft/developer_guide/index.html).
+OpenMined and Syft appreciates all contributors, if you would like to fix a bug or suggest a new feature, please reach out via Github or Slack! Contributors +# About OpenMined + +OpenMined is a non-profit foundation creating technology infrastructure that helps researchers get answers from data without needing a copy or direct access. Our community of technologists is building Syft. + + + # Supporters @@ -441,10 +251,6 @@ OpenMined and Syft appreciates all contributors, if you would like to fix a bug
-# Disclaimer - -Syft is under active development and is not yet ready for pilots on private data without our assistance. As early access participants, please contact us via [Slack](https://slack.openmined.org/) or email if you would like to ask a question or have a use case that you would like to discuss. - # License [Apache License 2.0](LICENSE)
diff --git a/VERSION b/VERSION index f74ed6cb9df..8ab9d6fe4f9 100644 --- a/VERSION +++ b/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.9.0-beta.3" +__version__ = "0.9.1-beta.2" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/docs/img/Syft-Logo-Light.svg b/docs/img/Syft-Logo-Light.svg new file mode 100644 index 00000000000..8d1bff88f21 --- /dev/null +++ b/docs/img/Syft-Logo-Light.svg @@ -0,0 +1,126 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/img/Syft-Logo.svg b/docs/img/Syft-Logo.svg new file mode 100644 index 00000000000..24adb15bbf7 --- /dev/null +++ b/docs/img/Syft-Logo.svg @@ -0,0 +1,126 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/notebooks/api/0.8/00-load-data.ipynb b/notebooks/api/0.8/00-load-data.ipynb index 5370bd2e595..1c8a6d3ff6a 100644 --- a/notebooks/api/0.8/00-load-data.ipynb +++ b/notebooks/api/0.8/00-load-data.ipynb @@ -30,7 +30,7 @@ }, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] @@ -732,7 +732,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.2" + "version": "3.11.5" }, "toc": { "base_numbering": 1, diff --git a/notebooks/api/0.8/01-submit-code.ipynb b/notebooks/api/0.8/01-submit-code.ipynb index 74bb381b81d..d559749e5d3 100644 --- a/notebooks/api/0.8/01-submit-code.ipynb +++ b/notebooks/api/0.8/01-submit-code.ipynb @@ -22,7 +22,7 @@ }, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/notebooks/api/0.8/02-review-code-and-approve.ipynb b/notebooks/api/0.8/02-review-code-and-approve.ipynb index 0d1f81dc9ef..dc0327dc306 100644 --- a/notebooks/api/0.8/02-review-code-and-approve.ipynb +++ b/notebooks/api/0.8/02-review-code-and-approve.ipynb @@ -22,7 +22,7 @@ }, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/notebooks/api/0.8/03-data-scientist-download-result.ipynb b/notebooks/api/0.8/03-data-scientist-download-result.ipynb index 47b6cfe001a..0e1adc77ed3 100644 --- a/notebooks/api/0.8/03-data-scientist-download-result.ipynb +++ b/notebooks/api/0.8/03-data-scientist-download-result.ipynb @@ -22,7 +22,7 @@ }, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/notebooks/api/0.8/04-pytorch-example.ipynb b/notebooks/api/0.8/04-pytorch-example.ipynb index dcfc34acbc2..66cb5a7df54 100644 --- a/notebooks/api/0.8/04-pytorch-example.ipynb +++ b/notebooks/api/0.8/04-pytorch-example.ipynb @@ -9,7 +9,7 @@ }, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/notebooks/api/0.8/05-custom-policy.ipynb b/notebooks/api/0.8/05-custom-policy.ipynb index 819d7fc934e..9854efe851e 100644 --- a/notebooks/api/0.8/05-custom-policy.ipynb +++ b/notebooks/api/0.8/05-custom-policy.ipynb @@ -9,7 +9,7 @@ }, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/notebooks/api/0.8/06-multiple-code-requests.ipynb b/notebooks/api/0.8/06-multiple-code-requests.ipynb index 85f4fe0b88d..70ce3d055bf 100644 --- a/notebooks/api/0.8/06-multiple-code-requests.ipynb +++ b/notebooks/api/0.8/06-multiple-code-requests.ipynb @@ -9,7 +9,7 @@ }, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/notebooks/api/0.8/07-datasite-register-control-flow.ipynb b/notebooks/api/0.8/07-datasite-register-control-flow.ipynb index 3f19b01311a..d4c10dd0cae 100644 --- a/notebooks/api/0.8/07-datasite-register-control-flow.ipynb +++ b/notebooks/api/0.8/07-datasite-register-control-flow.ipynb @@ -25,7 +25,7 @@ "metadata": {}, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/notebooks/api/0.8/08-code-version.ipynb b/notebooks/api/0.8/08-code-version.ipynb index b67ffb54f0d..c85361f7a28 100644 --- a/notebooks/api/0.8/08-code-version.ipynb +++ b/notebooks/api/0.8/08-code-version.ipynb @@ -20,7 +20,7 @@ "metadata": {}, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "%pip install {package_string} -q" ] diff --git a/notebooks/api/0.8/09-blob-storage.ipynb b/notebooks/api/0.8/09-blob-storage.ipynb index 57c1837effd..de15073ee36 100644 --- a/notebooks/api/0.8/09-blob-storage.ipynb +++ b/notebooks/api/0.8/09-blob-storage.ipynb @@ -6,7 +6,7 @@ "metadata": {}, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/notebooks/api/0.8/10-container-images.ipynb b/notebooks/api/0.8/10-container-images.ipynb index 91a6dc34918..72eb72c367d 100644 --- a/notebooks/api/0.8/10-container-images.ipynb +++ b/notebooks/api/0.8/10-container-images.ipynb @@ -7,7 +7,7 @@ "metadata": {}, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'" ] }, @@ -149,7 +149,8 @@ " \"local-dev\"\n", " if (bool(os.environ[\"DEV_MODE\"]) and running_as_container)\n", " else sy.__version__\n", - ")" + ")\n", + "syft_base_worker_tag = \"0.9.0-beta.5\"" ] }, { diff --git a/notebooks/api/0.8/11-container-images-k8s.ipynb b/notebooks/api/0.8/11-container-images-k8s.ipynb index 68620d9c114..2a6ca77dabb 100644 --- a/notebooks/api/0.8/11-container-images-k8s.ipynb +++ b/notebooks/api/0.8/11-container-images-k8s.ipynb @@ -7,7 +7,7 @@ "metadata": {}, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'" ] }, diff --git a/notebooks/api/0.8/13-forgot-user-password.ipynb b/notebooks/api/0.8/13-forgot-user-password.ipynb new file mode 100644 index 00000000000..8ad3cdf0918 --- /dev/null +++ b/notebooks/api/0.8/13-forgot-user-password.ipynb @@ -0,0 +1,181 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "id": "0", + "metadata": {}, + "source": [ + "# Forgot User Password" + ] + }, + { + "cell_type": "markdown", + "id": "1", + "metadata": {}, + "source": [ + "## Initialize the server" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "2", + "metadata": {}, + "outputs": [], + "source": [ + "# stdlib\n", + "\n", + "# syft absolute\n", + "import syft as sy\n", + "from syft import SyftError\n", + "from syft import SyftSuccess\n", + "\n", + "server = sy.orchestra.launch(\n", + " name=\"test-datasite-1\",\n", + " dev_mode=True,\n", + " create_producer=True,\n", + " n_consumers=3,\n", + " reset=True,\n", + " port=8081,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "id": "3", + "metadata": {}, + "source": [ + "## Register a new user" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "4", + "metadata": {}, + "outputs": [], + "source": [ + "datasite_client = server.login(email=\"info@openmined.org\", password=\"changethis\")\n", + "res = datasite_client.register(\n", + " email=\"new_syft_user@openmined.org\",\n", + " password=\"verysecurepassword\",\n", + " password_verify=\"verysecurepassword\",\n", + " name=\"New User\",\n", + ")\n", + "\n", + "if not isinstance(res, SyftSuccess):\n", + " raise Exception(f\"Res isn't SyftSuccess, its {res}\")" + ] + }, + { + "cell_type": "markdown", + "id": "5", + "metadata": {}, + "source": [ + "### Ask for a password reset - Notifier disabled Workflow" + ] + }, + { + "cell_type": "markdown", + "id": "6", + "metadata": {}, + "source": [ + "### Call for users.forgot_password" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "7", + "metadata": {}, + "outputs": [], + "source": [ + "guest_client = server.login_as_guest()\n", + "res = guest_client.users.forgot_password(email=\"new_syft_user@openmined.org\")\n", + "\n", + "if not isinstance(res, SyftSuccess):\n", + " raise Exception(f\"Res isn't SyftSuccess, its {res}\")" + ] + }, + { + "cell_type": "markdown", + "id": "8", + "metadata": {}, + "source": [ + "### Admin generates a temp token" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "9", + "metadata": {}, + "outputs": [], + "source": [ + "temp_token = datasite_client.users.request_password_reset(\n", + " datasite_client.notifications[-1].linked_obj.resolve.id\n", + ")\n", + "\n", + "if not isinstance(temp_token, str):\n", + " raise Exception(f\"temp_token isn't a string, its {temp_token}\")" + ] + }, + { + "cell_type": "markdown", + "id": "10", + "metadata": {}, + "source": [ + "### User use this token to reset password" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "11", + "metadata": {}, + "outputs": [], + "source": [ + "res = guest_client.users.reset_password(token=temp_token, new_password=\"Password123\")\n", + "\n", + "if not isinstance(res, SyftSuccess):\n", + " raise Exception(f\"Res isn't SyftSuccess, its {res}\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "12", + "metadata": {}, + "outputs": [], + "source": [ + "new_user_session = server.login(\n", + " email=\"new_syft_user@openmined.org\", password=\"Password123\"\n", + ")\n", + "\n", + "if isinstance(new_user_session, SyftError):\n", + " raise Exception(f\"Res isn't SyftSuccess, its {new_user_session}\")" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.4" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/notebooks/tutorials/data-owner/01-uploading-private-data.ipynb b/notebooks/tutorials/data-owner/01-uploading-private-data.ipynb index 1803bc2257c..71567b558b0 100644 --- a/notebooks/tutorials/data-owner/01-uploading-private-data.ipynb +++ b/notebooks/tutorials/data-owner/01-uploading-private-data.ipynb @@ -25,7 +25,7 @@ "metadata": {}, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/notebooks/tutorials/data-owner/02-account-management.ipynb b/notebooks/tutorials/data-owner/02-account-management.ipynb index 4f145816565..66e01be2644 100644 --- a/notebooks/tutorials/data-owner/02-account-management.ipynb +++ b/notebooks/tutorials/data-owner/02-account-management.ipynb @@ -23,7 +23,7 @@ "metadata": {}, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/notebooks/tutorials/data-owner/03-messages-and-requests.ipynb b/notebooks/tutorials/data-owner/03-messages-and-requests.ipynb index 398862601f2..40868ce167d 100644 --- a/notebooks/tutorials/data-owner/03-messages-and-requests.ipynb +++ b/notebooks/tutorials/data-owner/03-messages-and-requests.ipynb @@ -23,7 +23,7 @@ "metadata": {}, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/notebooks/tutorials/data-owner/05-syft-services-api.ipynb b/notebooks/tutorials/data-owner/05-syft-services-api.ipynb index 760ec6c481d..d891bc16cdf 100644 --- a/notebooks/tutorials/data-owner/05-syft-services-api.ipynb +++ b/notebooks/tutorials/data-owner/05-syft-services-api.ipynb @@ -23,7 +23,7 @@ "metadata": {}, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/notebooks/tutorials/data-scientist/03-working-with-private-datasets.ipynb b/notebooks/tutorials/data-scientist/03-working-with-private-datasets.ipynb index 731123c88d4..727928b60d8 100644 --- a/notebooks/tutorials/data-scientist/03-working-with-private-datasets.ipynb +++ b/notebooks/tutorials/data-scientist/03-working-with-private-datasets.ipynb @@ -23,7 +23,7 @@ "metadata": {}, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/notebooks/tutorials/data-scientist/04-syft-functions.ipynb b/notebooks/tutorials/data-scientist/04-syft-functions.ipynb index 64812ef5333..b0787b96356 100644 --- a/notebooks/tutorials/data-scientist/04-syft-functions.ipynb +++ b/notebooks/tutorials/data-scientist/04-syft-functions.ipynb @@ -23,7 +23,7 @@ "metadata": {}, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] @@ -206,7 +206,7 @@ "metadata": {}, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/notebooks/tutorials/data-scientist/05-messaging-and-requests.ipynb b/notebooks/tutorials/data-scientist/05-messaging-and-requests.ipynb index fa8cfc445ba..68aa03d284a 100644 --- a/notebooks/tutorials/data-scientist/05-messaging-and-requests.ipynb +++ b/notebooks/tutorials/data-scientist/05-messaging-and-requests.ipynb @@ -23,7 +23,7 @@ "metadata": {}, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/notebooks/tutorials/hello-syft/01-hello-syft.ipynb b/notebooks/tutorials/hello-syft/01-hello-syft.ipynb index b710b59a92f..cc2da4bf58f 100644 --- a/notebooks/tutorials/hello-syft/01-hello-syft.ipynb +++ b/notebooks/tutorials/hello-syft/01-hello-syft.ipynb @@ -56,7 +56,7 @@ }, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/notebooks/tutorials/model-auditing/colab/01-user-log.ipynb b/notebooks/tutorials/model-auditing/colab/01-user-log.ipynb index 1fd95c66ebb..e97442b5dac 100644 --- a/notebooks/tutorials/model-auditing/colab/01-user-log.ipynb +++ b/notebooks/tutorials/model-auditing/colab/01-user-log.ipynb @@ -44,7 +44,7 @@ "outputs": [], "source": [ "# install syft\n", - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "%pip install {package_string} -q" ] diff --git a/notebooks/tutorials/model-training/00-data-owner-upload-data.ipynb b/notebooks/tutorials/model-training/00-data-owner-upload-data.ipynb index adca3805b12..2d5078fa614 100644 --- a/notebooks/tutorials/model-training/00-data-owner-upload-data.ipynb +++ b/notebooks/tutorials/model-training/00-data-owner-upload-data.ipynb @@ -386,7 +386,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.2" + "version": "3.10.9" }, "toc": { "base_numbering": 1, diff --git a/notebooks/tutorials/model-training/01-data-scientist-submit-code.ipynb b/notebooks/tutorials/model-training/01-data-scientist-submit-code.ipynb index 13e52c83015..c66092b49b4 100644 --- a/notebooks/tutorials/model-training/01-data-scientist-submit-code.ipynb +++ b/notebooks/tutorials/model-training/01-data-scientist-submit-code.ipynb @@ -548,7 +548,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.2" + "version": "3.10.9" }, "toc": { "base_numbering": 1, diff --git a/notebooks/tutorials/model-training/02-data-owner-review-approve-code.ipynb b/notebooks/tutorials/model-training/02-data-owner-review-approve-code.ipynb index 5606ec79111..f4a75cb669b 100644 --- a/notebooks/tutorials/model-training/02-data-owner-review-approve-code.ipynb +++ b/notebooks/tutorials/model-training/02-data-owner-review-approve-code.ipynb @@ -296,7 +296,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.13" + "version": "3.10.9" }, "toc": { "base_numbering": 1, diff --git a/notebooks/tutorials/model-training/03-data-scientist-download-results.ipynb b/notebooks/tutorials/model-training/03-data-scientist-download-results.ipynb index 250a9f23dcc..2277e6ad2f2 100644 --- a/notebooks/tutorials/model-training/03-data-scientist-download-results.ipynb +++ b/notebooks/tutorials/model-training/03-data-scientist-download-results.ipynb @@ -282,7 +282,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.2" + "version": "3.10.9" }, "toc": { "base_numbering": 1, diff --git a/notebooks/tutorials/pandas-cookbook/01-reading-from-a-csv.ipynb b/notebooks/tutorials/pandas-cookbook/01-reading-from-a-csv.ipynb index df6b188c0c4..c0e76617809 100644 --- a/notebooks/tutorials/pandas-cookbook/01-reading-from-a-csv.ipynb +++ b/notebooks/tutorials/pandas-cookbook/01-reading-from-a-csv.ipynb @@ -25,7 +25,7 @@ }, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/notebooks/tutorials/pandas-cookbook/02-selecting-data-finding-common-complain.ipynb b/notebooks/tutorials/pandas-cookbook/02-selecting-data-finding-common-complain.ipynb index bdb331d2f4e..434741c19fc 100644 --- a/notebooks/tutorials/pandas-cookbook/02-selecting-data-finding-common-complain.ipynb +++ b/notebooks/tutorials/pandas-cookbook/02-selecting-data-finding-common-complain.ipynb @@ -25,7 +25,7 @@ }, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/notebooks/tutorials/pandas-cookbook/03-which-borough-has-the-most-noise-complaints.ipynb b/notebooks/tutorials/pandas-cookbook/03-which-borough-has-the-most-noise-complaints.ipynb index 7dc6a47b948..c81eb08e469 100644 --- a/notebooks/tutorials/pandas-cookbook/03-which-borough-has-the-most-noise-complaints.ipynb +++ b/notebooks/tutorials/pandas-cookbook/03-which-borough-has-the-most-noise-complaints.ipynb @@ -28,7 +28,7 @@ }, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/notebooks/tutorials/pandas-cookbook/04-weekday-bike-most-groupby-aggregate.ipynb b/notebooks/tutorials/pandas-cookbook/04-weekday-bike-most-groupby-aggregate.ipynb index 1d065baae95..3f99c0b2cf9 100644 --- a/notebooks/tutorials/pandas-cookbook/04-weekday-bike-most-groupby-aggregate.ipynb +++ b/notebooks/tutorials/pandas-cookbook/04-weekday-bike-most-groupby-aggregate.ipynb @@ -17,7 +17,7 @@ }, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/notebooks/tutorials/pandas-cookbook/05-combining-dataframes-scraping-weather-data.ipynb b/notebooks/tutorials/pandas-cookbook/05-combining-dataframes-scraping-weather-data.ipynb index 8bece628ede..0878c0a9cfe 100644 --- a/notebooks/tutorials/pandas-cookbook/05-combining-dataframes-scraping-weather-data.ipynb +++ b/notebooks/tutorials/pandas-cookbook/05-combining-dataframes-scraping-weather-data.ipynb @@ -25,7 +25,7 @@ }, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/notebooks/tutorials/pandas-cookbook/06-string-operations-which-month-was-the-snowiest.ipynb b/notebooks/tutorials/pandas-cookbook/06-string-operations-which-month-was-the-snowiest.ipynb index 2fc567f9be9..d7c4cafd3d0 100644 --- a/notebooks/tutorials/pandas-cookbook/06-string-operations-which-month-was-the-snowiest.ipynb +++ b/notebooks/tutorials/pandas-cookbook/06-string-operations-which-month-was-the-snowiest.ipynb @@ -25,7 +25,7 @@ }, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/notebooks/tutorials/pandas-cookbook/07-cleaning-up-messy-data.ipynb b/notebooks/tutorials/pandas-cookbook/07-cleaning-up-messy-data.ipynb index bbbfe46751e..303e0a808d4 100644 --- a/notebooks/tutorials/pandas-cookbook/07-cleaning-up-messy-data.ipynb +++ b/notebooks/tutorials/pandas-cookbook/07-cleaning-up-messy-data.ipynb @@ -25,7 +25,7 @@ }, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/notebooks/tutorials/pandas-cookbook/08-how-to-deal-with-timestamps.ipynb b/notebooks/tutorials/pandas-cookbook/08-how-to-deal-with-timestamps.ipynb index 153ec4d16e0..2e40e221bfb 100644 --- a/notebooks/tutorials/pandas-cookbook/08-how-to-deal-with-timestamps.ipynb +++ b/notebooks/tutorials/pandas-cookbook/08-how-to-deal-with-timestamps.ipynb @@ -27,7 +27,7 @@ }, "outputs": [], "source": [ - "SYFT_VERSION = \">=0.8.2.b0,<0.9\"\n", + "SYFT_VERSION = \">=0.9,<1.0.0\"\n", "package_string = f'\"syft{SYFT_VERSION}\"'\n", "# %pip install {package_string} -q" ] diff --git a/packages/grid/VERSION b/packages/grid/VERSION index f74ed6cb9df..8ab9d6fe4f9 100644 --- a/packages/grid/VERSION +++ b/packages/grid/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.9.0-beta.3" +__version__ = "0.9.1-beta.2" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/grid/backend/grid/images/worker_cpu.dockerfile b/packages/grid/backend/grid/images/worker_cpu.dockerfile index 3f7baa8520f..5349e544b19 100644 --- a/packages/grid/backend/grid/images/worker_cpu.dockerfile +++ b/packages/grid/backend/grid/images/worker_cpu.dockerfile @@ -5,7 +5,7 @@ # NOTE: This dockerfile will be built inside a syft-backend container in PROD # Hence COPY will not work the same way in DEV vs. PROD -ARG SYFT_VERSION_TAG="0.9.0-beta.3" +ARG SYFT_VERSION_TAG="0.9.1-beta.2" FROM openmined/syft-backend:${SYFT_VERSION_TAG} # should match base image python version diff --git a/packages/grid/devspace.yaml b/packages/grid/devspace.yaml index cfa734a6928..f19ba02c00b 100644 --- a/packages/grid/devspace.yaml +++ b/packages/grid/devspace.yaml @@ -28,7 +28,7 @@ vars: DOCKER_IMAGE_RATHOLE: openmined/syft-rathole DOCKER_IMAGE_ENCLAVE_ATTESTATION: openmined/syft-enclave-attestation CONTAINER_REGISTRY: "docker.io" - VERSION: "0.9.0-beta.3" + VERSION: "0.9.1-beta.2" PLATFORM: $(uname -m | grep -q 'arm64' && echo "arm64" || echo "amd64") # This is a list of `images` that DevSpace can build for this project diff --git a/packages/grid/frontend/package.json b/packages/grid/frontend/package.json index 3fc6edd48af..1c773ff46dc 100644 --- a/packages/grid/frontend/package.json +++ b/packages/grid/frontend/package.json @@ -1,6 +1,6 @@ { "name": "syft-ui", - "version": "0.9.0-beta.3", + "version": "0.9.1-beta.2", "private": true, "scripts": { "dev": "pnpm i && vite dev --host --port 80", diff --git a/packages/grid/helm/repo/index.yaml b/packages/grid/helm/repo/index.yaml index 0632a030e55..9e76bf9ab4f 100644 --- a/packages/grid/helm/repo/index.yaml +++ b/packages/grid/helm/repo/index.yaml @@ -1,9 +1,74 @@ apiVersion: v1 entries: syft: + - apiVersion: v2 + appVersion: 0.9.1-beta.2 + created: "2024-08-04T12:36:40.343114522Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 6f605af4ffc0e42a0733593faf1b5e588bbe58ff9f49b903a41bd4a751ddb694 + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.9.1-beta.2.tgz + version: 0.9.1-beta.2 + - apiVersion: v2 + appVersion: 0.9.1-beta.1 + created: "2024-08-04T12:36:40.342414484Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 9c99243e63888391654f23044144e2095dee48a599cd4b2e4f43ead6f76a8572 + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.9.1-beta.1.tgz + version: 0.9.1-beta.1 + - apiVersion: v2 + appVersion: 0.9.0 + created: "2024-08-04T12:36:40.341707822Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: baf218c8543a2525f7d4cced1e49b0d4e38ee1661d7171a55a069bf765b5b6d8 + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.9.0.tgz + version: 0.9.0 + - apiVersion: v2 + appVersion: 0.9.0-beta.5 + created: "2024-08-04T12:36:40.340974261Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: a4eafd04b39b0c75d6a28ed2f7cfece450150477dc2c6a01e10e2087a5b02835 + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.9.0-beta.5.tgz + version: 0.9.0-beta.5 + - apiVersion: v2 + appVersion: 0.9.0-beta.4 + created: "2024-08-04T12:36:40.340262501Z" + description: Perform numpy-like analysis on data that remains in someone elses + server + digest: 5a3cd3dd57609231ffc13e6af8d55f68b1b79fbbe8261740db957526fb8a536a + home: https://github.com/OpenMined/PySyft/ + icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png + name: syft + type: application + urls: + - https://openmined.github.io/PySyft/helm/syft-0.9.0-beta.4.tgz + version: 0.9.0-beta.4 - apiVersion: v2 appVersion: 0.9.0-beta.3 - created: "2024-07-30T07:52:02.555199795Z" + created: "2024-08-04T12:36:40.33951264Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: affe0898286720a0281c2363bed404a09d229a5359951b4dfdd8e746d628b4cb @@ -16,7 +81,7 @@ entries: version: 0.9.0-beta.3 - apiVersion: v2 appVersion: 0.9.0-beta.2 - created: "2024-07-30T07:52:02.554486414Z" + created: "2024-08-04T12:36:40.338811439Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 105b60f0ff01f50386d2b063cb58c0e91ee41b74cefee7bca3f56e4025c38dd1 @@ -29,7 +94,7 @@ entries: version: 0.9.0-beta.2 - apiVersion: v2 appVersion: 0.9.0-beta.1 - created: "2024-07-30T07:52:02.553765819Z" + created: "2024-08-04T12:36:40.338101953Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 10246075684d168e6a51c009581b77df8d729e29e11abc4a360fae42659a6409 @@ -42,7 +107,7 @@ entries: version: 0.9.0-beta.1 - apiVersion: v2 appVersion: 0.8.8 - created: "2024-07-30T07:52:02.553000261Z" + created: "2024-08-04T12:36:40.337373492Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 46f75bdf8c39e0f17de266bf19b64852e0dbf7f7bcea60bf7a19018ff17370ad @@ -55,7 +120,7 @@ entries: version: 0.8.8 - apiVersion: v2 appVersion: 0.8.8-beta.4 - created: "2024-07-30T07:52:02.551541399Z" + created: "2024-08-04T12:36:40.336075354Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: cc0a3b49df19435a407e4764be6c5748511f14273e668e7f1d326af28b29f22a @@ -68,7 +133,7 @@ entries: version: 0.8.8-beta.4 - apiVersion: v2 appVersion: 0.8.8-beta.3 - created: "2024-07-30T07:52:02.550844138Z" + created: "2024-08-04T12:36:40.335195471Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: de2fba39516e98be39ae0110a2cfa5bfa2b665d7a35a4516b43c5310bbf621dc @@ -81,7 +146,7 @@ entries: version: 0.8.8-beta.3 - apiVersion: v2 appVersion: 0.8.8-beta.2 - created: "2024-07-30T07:52:02.550142499Z" + created: "2024-08-04T12:36:40.334487909Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1323f4082c65944b522cd8e36dc7285c83c7dfcf6a56f7962665a8b1256a4d09 @@ -94,7 +159,7 @@ entries: version: 0.8.8-beta.2 - apiVersion: v2 appVersion: 0.8.8-beta.1 - created: "2024-07-30T07:52:02.549409011Z" + created: "2024-08-04T12:36:40.333779625Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ec027b50b8182ef656be14ddca9537785c37712a4be8cb940f30ac029b63de2d @@ -107,7 +172,7 @@ entries: version: 0.8.8-beta.1 - apiVersion: v2 appVersion: 0.8.7 - created: "2024-07-30T07:52:02.54871199Z" + created: "2024-08-04T12:36:40.333032849Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7ea7f63d1c6d0948860547f8aa39343fc5ef399c8e62d9d7edd4473cf44d8186 @@ -120,7 +185,7 @@ entries: version: 0.8.7 - apiVersion: v2 appVersion: 0.8.7-beta.16 - created: "2024-07-30T07:52:02.541931969Z" + created: "2024-08-04T12:36:40.326222826Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 75190eae57b64c2c47ab4a7fe3c6e94f35eb8045807a843ec8d7b26585c9e840 @@ -133,7 +198,7 @@ entries: version: 0.8.7-beta.16 - apiVersion: v2 appVersion: 0.8.7-beta.15 - created: "2024-07-30T07:52:02.540990643Z" + created: "2024-08-04T12:36:40.325389009Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 56879d9a9f10febce88676d3d20621d74d17f9e33f5df6ae1e9bc3078c216f0c @@ -146,7 +211,7 @@ entries: version: 0.8.7-beta.15 - apiVersion: v2 appVersion: 0.8.7-beta.14 - created: "2024-07-30T07:52:02.540152078Z" + created: "2024-08-04T12:36:40.324546957Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6e7cbca1d603ba11e09ae2a3089cfdafaa08cfa07c553c4f0fb8b42f8d3028f7 @@ -159,7 +224,7 @@ entries: version: 0.8.7-beta.14 - apiVersion: v2 appVersion: 0.8.7-beta.13 - created: "2024-07-30T07:52:02.539284649Z" + created: "2024-08-04T12:36:40.323634304Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1dbe3ecdfec57bf25020cbcff783fab908f0eb0640ad684470b2fd1da1928005 @@ -172,7 +237,7 @@ entries: version: 0.8.7-beta.13 - apiVersion: v2 appVersion: 0.8.7-beta.12 - created: "2024-07-30T07:52:02.538557312Z" + created: "2024-08-04T12:36:40.322930629Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: e92b2f3a522dabb3a79ff762a7042ae16d2bf3a53eebbb2885a69b9f834d109c @@ -185,7 +250,7 @@ entries: version: 0.8.7-beta.12 - apiVersion: v2 appVersion: 0.8.7-beta.11 - created: "2024-07-30T07:52:02.537044321Z" + created: "2024-08-04T12:36:40.322224158Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 099f6cbd44b699ee2410a4be012ed1a8a65bcacb06a43057b2779d7fe34fc0ad @@ -198,7 +263,7 @@ entries: version: 0.8.7-beta.11 - apiVersion: v2 appVersion: 0.8.7-beta.10 - created: "2024-07-30T07:52:02.536350606Z" + created: "2024-08-04T12:36:40.321504152Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 00773cb241522e281c1915339fc362e047650e08958a736e93d6539f44cb5e25 @@ -211,7 +276,7 @@ entries: version: 0.8.7-beta.10 - apiVersion: v2 appVersion: 0.8.7-beta.9 - created: "2024-07-30T07:52:02.547873315Z" + created: "2024-08-04T12:36:40.33219259Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a3f8e85d9ddef7a644b959fcc2fcb0fc08f7b6abae1045e893d0d62fa4ae132e @@ -224,7 +289,7 @@ entries: version: 0.8.7-beta.9 - apiVersion: v2 appVersion: 0.8.7-beta.8 - created: "2024-07-30T07:52:02.547221759Z" + created: "2024-08-04T12:36:40.331504805Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a422ac88d8fd1fb80d5004d5eb6e95fa9efc7f6a87da12e5ac04829da7f04c4d @@ -237,7 +302,7 @@ entries: version: 0.8.7-beta.8 - apiVersion: v2 appVersion: 0.8.7-beta.7 - created: "2024-07-30T07:52:02.546554715Z" + created: "2024-08-04T12:36:40.330827899Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0dc313a1092e6256a7c8aad002c8ec380b3add2c289d680db1e238a336399b7a @@ -250,7 +315,7 @@ entries: version: 0.8.7-beta.7 - apiVersion: v2 appVersion: 0.8.7-beta.6 - created: "2024-07-30T07:52:02.545905463Z" + created: "2024-08-04T12:36:40.330193342Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 052a2ec1102d2a4c9915f95647abd4a6012f56fa05a106f4952ee9b55bf7bae8 @@ -263,7 +328,7 @@ entries: version: 0.8.7-beta.6 - apiVersion: v2 appVersion: 0.8.7-beta.5 - created: "2024-07-30T07:52:02.544434458Z" + created: "2024-08-04T12:36:40.329554617Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1728af756907c3fcbe87c2fd2de014a2d963c22a4c2eb6af6596b525a9b9a18a @@ -276,7 +341,7 @@ entries: version: 0.8.7-beta.5 - apiVersion: v2 appVersion: 0.8.7-beta.4 - created: "2024-07-30T07:52:02.543810193Z" + created: "2024-08-04T12:36:40.328896987Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 387a57a3904a05ed61e92ee48605ef6fd5044ff7e822e0924e0d4c485e2c88d2 @@ -289,7 +354,7 @@ entries: version: 0.8.7-beta.4 - apiVersion: v2 appVersion: 0.8.7-beta.3 - created: "2024-07-30T07:52:02.543190437Z" + created: "2024-08-04T12:36:40.327453429Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 3668002b7a4118516b2ecd61d6275f60d83fc12841587ab8f62e1c1200731c67 @@ -302,7 +367,7 @@ entries: version: 0.8.7-beta.3 - apiVersion: v2 appVersion: 0.8.7-beta.2 - created: "2024-07-30T07:52:02.542557446Z" + created: "2024-08-04T12:36:40.326815235Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: e62217ffcadee2b8896ab0543f9ccc42f2df898fd979438ac9376d780b802af7 @@ -315,7 +380,7 @@ entries: version: 0.8.7-beta.2 - apiVersion: v2 appVersion: 0.8.7-beta.1 - created: "2024-07-30T07:52:02.535679053Z" + created: "2024-08-04T12:36:40.320411617Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 553981fe1d5c980e6903b3ff2f1b9b97431f6dd8aee91e3976bcc5594285235e @@ -328,7 +393,7 @@ entries: version: 0.8.7-beta.1 - apiVersion: v2 appVersion: 0.8.6 - created: "2024-07-30T07:52:02.53516825Z" + created: "2024-08-04T12:36:40.319425518Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ddbbe6fea1702e57404875eb3019a3b1a341017bdbb5fbc6ce418507e5c15756 @@ -341,7 +406,7 @@ entries: version: 0.8.6 - apiVersion: v2 appVersion: 0.8.6-beta.1 - created: "2024-07-30T07:52:02.534628343Z" + created: "2024-08-04T12:36:40.318883071Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: cc2c81ef6796ac853dce256e6bf8a6af966c21803e6534ea21920af681c62e61 @@ -354,7 +419,7 @@ entries: version: 0.8.6-beta.1 - apiVersion: v2 appVersion: 0.8.5 - created: "2024-07-30T07:52:02.534086482Z" + created: "2024-08-04T12:36:40.318341967Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: db5d90d44006209fd5ecdebd88f5fd56c70f7c76898343719a0ff8da46da948a @@ -367,7 +432,7 @@ entries: version: 0.8.5 - apiVersion: v2 appVersion: 0.8.5-post.2 - created: "2024-07-30T07:52:02.533282402Z" + created: "2024-08-04T12:36:40.317580555Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ea3f7269b55f773fa165d7008c054b7cf3ec4c62eb40a96f08cd3a9b77fd2165 @@ -380,7 +445,7 @@ entries: version: 0.8.5-post.2 - apiVersion: v2 appVersion: 0.8.5-post.1 - created: "2024-07-30T07:52:02.532699474Z" + created: "2024-08-04T12:36:40.317040453Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9deb844d3dc2d8480c60f8c631dcc7794adfb39cec3aa3b1ce22ea26fdf87d02 @@ -393,7 +458,7 @@ entries: version: 0.8.5-post.1 - apiVersion: v2 appVersion: 0.8.5-beta.10 - created: "2024-07-30T07:52:02.524984229Z" + created: "2024-08-04T12:36:40.309245015Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9cfe01e8f57eca462261a24a805b41509be2de9a0fee76e331d124ed98c4bc49 @@ -406,7 +471,7 @@ entries: version: 0.8.5-beta.10 - apiVersion: v2 appVersion: 0.8.5-beta.9 - created: "2024-07-30T07:52:02.531873443Z" + created: "2024-08-04T12:36:40.316274863Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 057f1733f2bc966e15618f62629315c8207773ef6211c79c4feb557dae15c32b @@ -419,7 +484,7 @@ entries: version: 0.8.5-beta.9 - apiVersion: v2 appVersion: 0.8.5-beta.8 - created: "2024-07-30T07:52:02.531097445Z" + created: "2024-08-04T12:36:40.315474989Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 921cbce836c3032ef62b48cc82b5b4fcbe44fb81d473cf4d69a4bf0f806eb298 @@ -432,7 +497,7 @@ entries: version: 0.8.5-beta.8 - apiVersion: v2 appVersion: 0.8.5-beta.7 - created: "2024-07-30T07:52:02.529642591Z" + created: "2024-08-04T12:36:40.314711021Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 75482e955b2b9853a80bd653afb1d56535f78f3bfb7726798522307eb3effbbd @@ -445,7 +510,7 @@ entries: version: 0.8.5-beta.7 - apiVersion: v2 appVersion: 0.8.5-beta.6 - created: "2024-07-30T07:52:02.528891289Z" + created: "2024-08-04T12:36:40.313894427Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6a2dfaf65ca855e1b3d7b966d4ff291e6fcbe761e2fc2a78033211ccd3a75de0 @@ -458,7 +523,7 @@ entries: version: 0.8.5-beta.6 - apiVersion: v2 appVersion: 0.8.5-beta.5 - created: "2024-07-30T07:52:02.528139616Z" + created: "2024-08-04T12:36:40.313104762Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: fead03823bef04d66901d563aa755c68ab277f72b126aaa6f0dce76a6f3bdb6d @@ -471,7 +536,7 @@ entries: version: 0.8.5-beta.5 - apiVersion: v2 appVersion: 0.8.5-beta.4 - created: "2024-07-30T07:52:02.527372726Z" + created: "2024-08-04T12:36:40.311515457Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 93e4539d5726a7fd0d6a3e93d1c17c6a358a923ddc01d102eab22f37377502ab @@ -484,7 +549,7 @@ entries: version: 0.8.5-beta.4 - apiVersion: v2 appVersion: 0.8.5-beta.3 - created: "2024-07-30T07:52:02.52654962Z" + created: "2024-08-04T12:36:40.310758733Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: f91e9390edf3441469048f5da646099af98f8b6d199409d0e2c1e6da3a51f054 @@ -497,7 +562,7 @@ entries: version: 0.8.5-beta.3 - apiVersion: v2 appVersion: 0.8.5-beta.2 - created: "2024-07-30T07:52:02.525785675Z" + created: "2024-08-04T12:36:40.310003342Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 59159c3aa4888038edc3c0135c83402363d7a0639fe62966a1e9d4928a364fa8 @@ -510,7 +575,7 @@ entries: version: 0.8.5-beta.2 - apiVersion: v2 appVersion: 0.8.5-beta.1 - created: "2024-07-30T07:52:02.524197541Z" + created: "2024-08-04T12:36:40.308468464Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 65aeb74c52ed8ba5474af500b4c1188a570ee4cb1f2a2da356b3488d28356ed9 @@ -522,7 +587,7 @@ entries: version: 0.8.5-beta.1 - apiVersion: v2 appVersion: 0.8.4 - created: "2024-07-30T07:52:02.523777909Z" + created: "2024-08-04T12:36:40.308042083Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 08afea8e3a9eef225b7e611f0bc1216c140053ef8e51439b02337faeac621fd0 @@ -534,7 +599,7 @@ entries: version: 0.8.4 - apiVersion: v2 appVersion: 0.8.4-beta.31 - created: "2024-07-30T07:52:02.520538172Z" + created: "2024-08-04T12:36:40.305506119Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: fabf3e2f37e53fa623f5d3d99b00feae06e278e5cd63bce419089946312ab1fc @@ -546,7 +611,7 @@ entries: version: 0.8.4-beta.31 - apiVersion: v2 appVersion: 0.8.4-beta.30 - created: "2024-07-30T07:52:02.52011886Z" + created: "2024-08-04T12:36:40.304520497Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6e8f792709f73ec14eab48a268bdf50a4505b340bd142cddd7c7bfffd94009ad @@ -558,7 +623,7 @@ entries: version: 0.8.4-beta.30 - apiVersion: v2 appVersion: 0.8.4-beta.29 - created: "2024-07-30T07:52:02.519367047Z" + created: "2024-08-04T12:36:40.303512276Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 4c985d6a9b3456769c4013f9e85e7374c0f963d2d27627e61f914f5537de1971 @@ -570,7 +635,7 @@ entries: version: 0.8.4-beta.29 - apiVersion: v2 appVersion: 0.8.4-beta.28 - created: "2024-07-30T07:52:02.51896133Z" + created: "2024-08-04T12:36:40.303104229Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: bd2aa3c92c768c47c502e31a326f341addcb34e64d22cdcbf5cc3f19689d859c @@ -582,7 +647,7 @@ entries: version: 0.8.4-beta.28 - apiVersion: v2 appVersion: 0.8.4-beta.27 - created: "2024-07-30T07:52:02.518551395Z" + created: "2024-08-04T12:36:40.302693868Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: e8ad0869993af39d7adda8cb868dc0b24cfb63b4bb9820dc579939c1007a60ba @@ -594,7 +659,7 @@ entries: version: 0.8.4-beta.27 - apiVersion: v2 appVersion: 0.8.4-beta.26 - created: "2024-07-30T07:52:02.518138565Z" + created: "2024-08-04T12:36:40.30227451Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 30dccf630aa25a86a03c67572fe5411687d8ce6d58def448ea10efdba2b85e3a @@ -606,7 +671,7 @@ entries: version: 0.8.4-beta.26 - apiVersion: v2 appVersion: 0.8.4-beta.25 - created: "2024-07-30T07:52:02.517708863Z" + created: "2024-08-04T12:36:40.301864319Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b6e2043bcf5a0335967d770c7939f5a7832955359a7d871c90b265660ff26e5f @@ -618,7 +683,7 @@ entries: version: 0.8.4-beta.25 - apiVersion: v2 appVersion: 0.8.4-beta.24 - created: "2024-07-30T07:52:02.517258292Z" + created: "2024-08-04T12:36:40.301448939Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b19efa95394d50bb8d76da6ec306de5d3bb9ea55371fafea95a1282a697fa33e @@ -630,7 +695,7 @@ entries: version: 0.8.4-beta.24 - apiVersion: v2 appVersion: 0.8.4-beta.23 - created: "2024-07-30T07:52:02.516825515Z" + created: "2024-08-04T12:36:40.301017368Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 5c5d05c15bff548574896118ce92335ae10c5b78f5307fe9b2618e5a5aa71a5c @@ -642,7 +707,7 @@ entries: version: 0.8.4-beta.23 - apiVersion: v2 appVersion: 0.8.4-beta.22 - created: "2024-07-30T07:52:02.516242358Z" + created: "2024-08-04T12:36:40.300597329Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0160dbce938198132ca9cd2a5cb362816344687291f5b6d7cf6de8f2855e9414 @@ -654,7 +719,7 @@ entries: version: 0.8.4-beta.22 - apiVersion: v2 appVersion: 0.8.4-beta.21 - created: "2024-07-30T07:52:02.515282758Z" + created: "2024-08-04T12:36:40.30016673Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7dce153d2fcae7513e9c132e139b2721fd975ea3cc43a370e34dbeb2a1b7f683 @@ -666,7 +731,7 @@ entries: version: 0.8.4-beta.21 - apiVersion: v2 appVersion: 0.8.4-beta.20 - created: "2024-07-30T07:52:02.514870298Z" + created: "2024-08-04T12:36:40.299703541Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c51189a187bbf24135382e25cb00964e0330dfcd3b2f0c884581a6686f05dd28 @@ -678,7 +743,7 @@ entries: version: 0.8.4-beta.20 - apiVersion: v2 appVersion: 0.8.4-beta.19 - created: "2024-07-30T07:52:02.513887054Z" + created: "2024-08-04T12:36:40.298557875Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 8219575dedb42fa2ddbf2768a4e9afbfacbc2dff7e953d77c7b10a41b78dc687 @@ -690,7 +755,7 @@ entries: version: 0.8.4-beta.19 - apiVersion: v2 appVersion: 0.8.4-beta.18 - created: "2024-07-30T07:52:02.51344515Z" + created: "2024-08-04T12:36:40.297551467Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 6418cde559cf12f1f7fea5a2b123bba950e50eeb3be002441827d2ab7f9e4ef7 @@ -702,7 +767,7 @@ entries: version: 0.8.4-beta.18 - apiVersion: v2 appVersion: 0.8.4-beta.16 - created: "2024-07-30T07:52:02.513035806Z" + created: "2024-08-04T12:36:40.297146476Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 9c9840a7c9476dbb08e0ac83926330718fe50c89879752dd8f92712b036109c0 @@ -714,7 +779,7 @@ entries: version: 0.8.4-beta.16 - apiVersion: v2 appVersion: 0.8.4-beta.15 - created: "2024-07-30T07:52:02.512631241Z" + created: "2024-08-04T12:36:40.296740853Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0955fd22da028315e30c68132cbfa4bdc82bae622039bcfce0de339707bb82eb @@ -726,7 +791,7 @@ entries: version: 0.8.4-beta.15 - apiVersion: v2 appVersion: 0.8.4-beta.14 - created: "2024-07-30T07:52:02.512188014Z" + created: "2024-08-04T12:36:40.296329841Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 56208571956abe20ed7a5cc1867cab2667ed792c63e53d0e8bb70a9b438b7bf6 @@ -738,7 +803,7 @@ entries: version: 0.8.4-beta.14 - apiVersion: v2 appVersion: 0.8.4-beta.13 - created: "2024-07-30T07:52:02.511832842Z" + created: "2024-08-04T12:36:40.295942362Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: d7222c72412b6ee5833fbb07d2549be179cdfc7ccd89e0ad947d112fce799b83 @@ -750,7 +815,7 @@ entries: version: 0.8.4-beta.13 - apiVersion: v2 appVersion: 0.8.4-beta.12 - created: "2024-07-30T07:52:02.511380467Z" + created: "2024-08-04T12:36:40.29559079Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: af08c723756e397962b2d5190dedfd50797b771c5caf58b93a6f65d8fa24785c @@ -762,7 +827,7 @@ entries: version: 0.8.4-beta.12 - apiVersion: v2 appVersion: 0.8.4-beta.11 - created: "2024-07-30T07:52:02.511028791Z" + created: "2024-08-04T12:36:40.295241662Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: a0235835ba57d185a83dd8a26281fa37b2077c3a37fe3a1c50585005695927e3 @@ -774,7 +839,7 @@ entries: version: 0.8.4-beta.11 - apiVersion: v2 appVersion: 0.8.4-beta.10 - created: "2024-07-30T07:52:02.510681654Z" + created: "2024-08-04T12:36:40.294892805Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 910ddfeba0c5e66651500dd11404afff092adc0f768ed68e0d93b04b83aa4388 @@ -786,7 +851,7 @@ entries: version: 0.8.4-beta.10 - apiVersion: v2 appVersion: 0.8.4-beta.9 - created: "2024-07-30T07:52:02.522909196Z" + created: "2024-08-04T12:36:40.307627945Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: c25ca8a9f072d6a5d02232448deaef5668aca05f24dfffbba3ebe30a4f75bb26 @@ -798,7 +863,7 @@ entries: version: 0.8.4-beta.9 - apiVersion: v2 appVersion: 0.8.4-beta.8 - created: "2024-07-30T07:52:02.522355593Z" + created: "2024-08-04T12:36:40.307287263Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 7249a39d4137e457b369384ba0a365c271c780d93a8327ce25083df763c39999 @@ -810,7 +875,7 @@ entries: version: 0.8.4-beta.8 - apiVersion: v2 appVersion: 0.8.4-beta.7 - created: "2024-07-30T07:52:02.521971066Z" + created: "2024-08-04T12:36:40.306942483Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: ee750c7c8d6ea05bd447375e624fdd7f66dd87680ab81f7b7e73df7379a9024a @@ -822,7 +887,7 @@ entries: version: 0.8.4-beta.7 - apiVersion: v2 appVersion: 0.8.4-beta.6 - created: "2024-07-30T07:52:02.521579615Z" + created: "2024-08-04T12:36:40.306595159Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 0e046be9f73df7444a995608c59af16fab9030b139b2acb4d6db6185b8eb5337 @@ -834,7 +899,7 @@ entries: version: 0.8.4-beta.6 - apiVersion: v2 appVersion: 0.8.4-beta.5 - created: "2024-07-30T07:52:02.521228801Z" + created: "2024-08-04T12:36:40.306246542Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b56e9a23d46810eccdb4cf5272cc05126da3f6db314e541959c3efb5f260620b @@ -846,7 +911,7 @@ entries: version: 0.8.4-beta.5 - apiVersion: v2 appVersion: 0.8.4-beta.4 - created: "2024-07-30T07:52:02.520885831Z" + created: "2024-08-04T12:36:40.305880052Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: 1d5808ecaf55391f3b27ae6236400066508acbd242e33db24a1ab4bffa77409e @@ -858,7 +923,7 @@ entries: version: 0.8.4-beta.4 - apiVersion: v2 appVersion: 0.8.4-beta.3 - created: "2024-07-30T07:52:02.51971155Z" + created: "2024-08-04T12:36:40.30385922Z" description: Perform numpy-like analysis on data that remains in someone elses server digest: b64efa8529d82be56c6ab60487ed24420a5614d96d2509c1f93c1003eda71a54 @@ -870,7 +935,7 @@ entries: version: 0.8.4-beta.3 - apiVersion: v2 appVersion: 0.8.4-beta.2 - created: "2024-07-30T07:52:02.514445265Z" + created: "2024-08-04T12:36:40.29925583Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -886,7 +951,7 @@ entries: version: 0.8.4-beta.2 - apiVersion: v2 appVersion: 0.8.4-beta.1 - created: "2024-07-30T07:52:02.510311433Z" + created: "2024-08-04T12:36:40.294536404Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -902,7 +967,7 @@ entries: version: 0.8.4-beta.1 - apiVersion: v2 appVersion: 0.8.3 - created: "2024-07-30T07:52:02.509304673Z" + created: "2024-08-04T12:36:40.293977417Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -918,7 +983,7 @@ entries: version: 0.8.3 - apiVersion: v2 appVersion: 0.8.3-beta.6 - created: "2024-07-30T07:52:02.508015117Z" + created: "2024-08-04T12:36:40.293306452Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -934,7 +999,7 @@ entries: version: 0.8.3-beta.6 - apiVersion: v2 appVersion: 0.8.3-beta.5 - created: "2024-07-30T07:52:02.507440846Z" + created: "2024-08-04T12:36:40.292704245Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -950,7 +1015,7 @@ entries: version: 0.8.3-beta.5 - apiVersion: v2 appVersion: 0.8.3-beta.4 - created: "2024-07-30T07:52:02.506856866Z" + created: "2024-08-04T12:36:40.291719037Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -966,7 +1031,7 @@ entries: version: 0.8.3-beta.4 - apiVersion: v2 appVersion: 0.8.3-beta.2 - created: "2024-07-30T07:52:02.506193899Z" + created: "2024-08-04T12:36:40.29037229Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -982,7 +1047,7 @@ entries: version: 0.8.3-beta.2 - apiVersion: v2 appVersion: 0.8.3-beta.1 - created: "2024-07-30T07:52:02.505603407Z" + created: "2024-08-04T12:36:40.289821799Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -998,7 +1063,7 @@ entries: version: 0.8.3-beta.1 - apiVersion: v2 appVersion: 0.8.2 - created: "2024-07-30T07:52:02.505051788Z" + created: "2024-08-04T12:36:40.289272149Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1014,7 +1079,7 @@ entries: version: 0.8.2 - apiVersion: v2 appVersion: 0.8.2-beta.60 - created: "2024-07-30T07:52:02.504407877Z" + created: "2024-08-04T12:36:40.288629377Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1030,7 +1095,7 @@ entries: version: 0.8.2-beta.60 - apiVersion: v2 appVersion: 0.8.2-beta.59 - created: "2024-07-30T07:52:02.503739971Z" + created: "2024-08-04T12:36:40.287949074Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1046,7 +1111,7 @@ entries: version: 0.8.2-beta.59 - apiVersion: v2 appVersion: 0.8.2-beta.58 - created: "2024-07-30T07:52:02.485726571Z" + created: "2024-08-04T12:36:40.287305831Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1062,7 +1127,7 @@ entries: version: 0.8.2-beta.58 - apiVersion: v2 appVersion: 0.8.2-beta.57 - created: "2024-07-30T07:52:02.484751702Z" + created: "2024-08-04T12:36:40.286641158Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1078,7 +1143,7 @@ entries: version: 0.8.2-beta.57 - apiVersion: v2 appVersion: 0.8.2-beta.56 - created: "2024-07-30T07:52:02.484113191Z" + created: "2024-08-04T12:36:40.285971797Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1094,7 +1159,7 @@ entries: version: 0.8.2-beta.56 - apiVersion: v2 appVersion: 0.8.2-beta.52 - created: "2024-07-30T07:52:02.483451787Z" + created: "2024-08-04T12:36:40.28510589Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1110,7 +1175,7 @@ entries: version: 0.8.2-beta.52 - apiVersion: v2 appVersion: 0.8.2-beta.51 - created: "2024-07-30T07:52:02.482814638Z" + created: "2024-08-04T12:36:40.283771635Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1126,7 +1191,7 @@ entries: version: 0.8.2-beta.51 - apiVersion: v2 appVersion: 0.8.2-beta.50 - created: "2024-07-30T07:52:02.482155248Z" + created: "2024-08-04T12:36:40.283114797Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1142,7 +1207,7 @@ entries: version: 0.8.2-beta.50 - apiVersion: v2 appVersion: 0.8.2-beta.49 - created: "2024-07-30T07:52:02.481447427Z" + created: "2024-08-04T12:36:40.282457467Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1158,7 +1223,7 @@ entries: version: 0.8.2-beta.49 - apiVersion: v2 appVersion: 0.8.2-beta.48 - created: "2024-07-30T07:52:02.480769352Z" + created: "2024-08-04T12:36:40.28180684Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1174,7 +1239,7 @@ entries: version: 0.8.2-beta.48 - apiVersion: v2 appVersion: 0.8.2-beta.47 - created: "2024-07-30T07:52:02.480081679Z" + created: "2024-08-04T12:36:40.280922499Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1190,7 +1255,7 @@ entries: version: 0.8.2-beta.47 - apiVersion: v2 appVersion: 0.8.2-beta.46 - created: "2024-07-30T07:52:02.478762129Z" + created: "2024-08-04T12:36:40.280356239Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1206,7 +1271,7 @@ entries: version: 0.8.2-beta.46 - apiVersion: v2 appVersion: 0.8.2-beta.45 - created: "2024-07-30T07:52:02.478196233Z" + created: "2024-08-04T12:36:40.279756867Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1222,7 +1287,7 @@ entries: version: 0.8.2-beta.45 - apiVersion: v2 appVersion: 0.8.2-beta.44 - created: "2024-07-30T07:52:02.477596554Z" + created: "2024-08-04T12:36:40.279178584Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1238,7 +1303,7 @@ entries: version: 0.8.2-beta.44 - apiVersion: v2 appVersion: 0.8.2-beta.43 - created: "2024-07-30T07:52:02.477030318Z" + created: "2024-08-04T12:36:40.278423273Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1254,7 +1319,7 @@ entries: version: 0.8.2-beta.43 - apiVersion: v2 appVersion: 0.8.2-beta.41 - created: "2024-07-30T07:52:02.476373301Z" + created: "2024-08-04T12:36:40.277134816Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1270,7 +1335,7 @@ entries: version: 0.8.2-beta.41 - apiVersion: v2 appVersion: 0.8.2-beta.40 - created: "2024-07-30T07:52:02.47569704Z" + created: "2024-08-04T12:36:40.276467759Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1286,7 +1351,7 @@ entries: version: 0.8.2-beta.40 - apiVersion: v2 appVersion: 0.8.2-beta.39 - created: "2024-07-30T07:52:02.475111828Z" + created: "2024-08-04T12:36:40.275889156Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1302,7 +1367,7 @@ entries: version: 0.8.2-beta.39 - apiVersion: v2 appVersion: 0.8.2-beta.38 - created: "2024-07-30T07:52:02.474545501Z" + created: "2024-08-04T12:36:40.275317766Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1318,7 +1383,7 @@ entries: version: 0.8.2-beta.38 - apiVersion: v2 appVersion: 0.8.2-beta.37 - created: "2024-07-30T07:52:02.473933359Z" + created: "2024-08-04T12:36:40.274746887Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1334,7 +1399,7 @@ entries: version: 0.8.2-beta.37 - apiVersion: v2 appVersion: 0.8.1 - created: "2024-07-30T07:52:02.472951647Z" + created: "2024-08-04T12:36:40.274114403Z" dependencies: - name: component-chart repository: https://charts.devspace.sh @@ -1348,4 +1413,4 @@ entries: urls: - https://openmined.github.io/PySyft/helm/syft-0.8.1.tgz version: 0.8.1 -generated: "2024-07-30T07:52:02.47182193Z" +generated: "2024-08-04T12:36:40.273320982Z" diff --git a/packages/grid/helm/repo/syft-0.9.0-beta.4.tgz b/packages/grid/helm/repo/syft-0.9.0-beta.4.tgz new file mode 100644 index 00000000000..dfd98425d2c Binary files /dev/null and b/packages/grid/helm/repo/syft-0.9.0-beta.4.tgz differ diff --git a/packages/grid/helm/repo/syft-0.9.0-beta.5.tgz b/packages/grid/helm/repo/syft-0.9.0-beta.5.tgz new file mode 100644 index 00000000000..544956ed3c5 Binary files /dev/null and b/packages/grid/helm/repo/syft-0.9.0-beta.5.tgz differ diff --git a/packages/grid/helm/repo/syft-0.9.0.tgz b/packages/grid/helm/repo/syft-0.9.0.tgz new file mode 100644 index 00000000000..28872e9f232 Binary files /dev/null and b/packages/grid/helm/repo/syft-0.9.0.tgz differ diff --git a/packages/grid/helm/repo/syft-0.9.1-beta.1.tgz b/packages/grid/helm/repo/syft-0.9.1-beta.1.tgz new file mode 100644 index 00000000000..40c00ff7e08 Binary files /dev/null and b/packages/grid/helm/repo/syft-0.9.1-beta.1.tgz differ diff --git a/packages/grid/helm/repo/syft-0.9.1-beta.2.tgz b/packages/grid/helm/repo/syft-0.9.1-beta.2.tgz new file mode 100644 index 00000000000..7ac923e3529 Binary files /dev/null and b/packages/grid/helm/repo/syft-0.9.1-beta.2.tgz differ diff --git a/packages/grid/helm/syft/Chart.yaml b/packages/grid/helm/syft/Chart.yaml index 9aff9ccaa3c..c70456aa465 100644 --- a/packages/grid/helm/syft/Chart.yaml +++ b/packages/grid/helm/syft/Chart.yaml @@ -2,7 +2,7 @@ apiVersion: v2 name: syft description: Perform numpy-like analysis on data that remains in someone elses server type: application -version: "0.9.0-beta.3" -appVersion: "0.9.0-beta.3" +version: "0.9.1-beta.2" +appVersion: "0.9.1-beta.2" home: https://github.com/OpenMined/PySyft/ icon: https://raw.githubusercontent.com/OpenMined/PySyft/dev/docs/img/title_syft_light.png diff --git a/packages/grid/helm/syft/values.yaml b/packages/grid/helm/syft/values.yaml index 70603f8d85b..2b5f75f4777 100644 --- a/packages/grid/helm/syft/values.yaml +++ b/packages/grid/helm/syft/values.yaml @@ -1,7 +1,7 @@ global: # Affects only backend, frontend, and seaweedfs containers registry: docker.io - version: 0.9.0-beta.3 + version: 0.9.1-beta.2 # Force default secret values for development. DO NOT SET THIS TO FALSE IN PRODUCTION randomizedSecrets: true diff --git a/packages/syft/PYPI.md b/packages/syft/PYPI.md index 5c86db949ef..1cc97e342e2 100644 --- a/packages/syft/PYPI.md +++ b/packages/syft/PYPI.md @@ -1,13 +1,27 @@ -
+


-Syft Logo +Syft Logo -Perform data science on `data` that remains in `someone else's` server +

Data Science on data you are not allowed to see

+ +PySyft enables a new way to do data science, where you can use non-public information, without seeing nor obtaining a copy of the data itself. All you need is to connect to a Datasite! + +Datasites are like websites, but for data. Designed with the principles of structured transparency, they enable data owners to control how their data is protected and data scientists to use data without obtaining a copy. + +PySyft supports any statistical analysis or machine learning, offering support for directly running Python code - even using third-party Python libraries. + +

Supported on:

+ +✅ Linux +✅ macOS +✅ Windows +✅ Docker +✅ Kubernetes # Quickstart -✅ `Linux` ✅ `macOS` ✅ `Windows` ✅ `Docker` ✅ `Kubernetes` +Try out your first query against a live demo Datasite! ## Install Client @@ -15,34 +29,46 @@ Perform data science on `data` that remains in `someone else's` server $ pip install -U syft[data_science] ``` +More instructions are available here. + ## Launch Server +Launch a development server directly in your Jupyter Notebook: + ```python -# from Jupyter / Python import syft as sy -sy.requires(">=0.8.8,<0.8.9") + +sy.requires(">=0.9,<0.9.1") + server = sy.orchestra.launch( name="my-datasite", port=8080, create_producer=True, n_consumers=1, - dev_mode=True, + dev_mode=False, reset=True, # resets database ) ``` +or from the command line: + ```bash -# or from the command line $ syft launch --name=my-datasite --port=8080 --reset=True Starting syft-datasite server on 0.0.0.0:8080 ``` +Datasite servers can be deployed as a single container using Docker or directly in Kubernetes. Check out our deployment guide. + ## Launch Client +Main way to use a Datasite is via our Syft client, in a Jupyter Notebook. Check out our PySyft client guide: + ```python import syft as sy -sy.requires(">=0.8.8,<0.8.9") + +sy.requires(">=0.9,<0.9.1") + datasite_client = sy.login( port=8080, email="info@openmined.org", @@ -50,311 +76,100 @@ datasite_client = sy.login( ) ``` -## PySyft in 10 minutes - -📝 API Example Notebooks - -- 00-load-data.ipynb -- 01-submit-code.ipynb -- 02-review-code-and-approve.ipynb -- 03-data-scientist-download-result.ipynb -- 04-pytorch-example.ipynb -- 05-custom-policy.ipynb -- 06-multiple-code-requests.ipynb -- 07-datasite-register-control-flow.ipynb -- 08-code-version.ipynb -- 09-blob-storage.ipynb -- 10-container-images.ipynb -- 11-container-images-k8s.ipynb -- 12-custom-api-endpoint.ipynb - -## Deploy Kubernetes Helm Chart - -#### 0. Deploy Kubernetes - -Required resources: 1 CPU and 4GB RAM. However, you will need some special instructions to deploy, please consult [these instructions](https://github.com/OpenMined/PySyft/blob/dev/notebooks/tutorials/deployments/03-deploy-k8s-k3d.ipynb) or look at the resource constraint testing [here](https://github.com/OpenMined/PySyft/pull/8828#issue-2300774645). -Recommended resources: 8+ Cores and 16GB RAM - -If you're using Docker Desktop to deploy your Kubernetes, you may need to go into Settings > Resources and increase CPUs and Memory. - -**Note**: Assuming we have a Kubernetes cluster already setup. - -#### 1. Add and update Helm repo for Syft - -```sh -helm repo add openmined https://openmined.github.io/PySyft/helm -helm repo update openmined -``` - -#### 2. Search for available Syft versions - -```sh -helm search repo openmined/syft --versions --devel -``` - -#### 3. Set your preferred Syft Chart version - -```sh -SYFT_VERSION="" -``` - -#### 4. Provisioning Helm Charts - -```sh -helm install my-datasite openmined/syft --version $SYFT_VERSION --namespace syft --create-namespace --set ingress.className="traefik" -``` - -### Ingress Controllers - -For Azure AKS - -```sh -helm install ... --set ingress.className="azure-application-gateway" -``` - -For AWS EKS - -```sh -helm install ... --set ingress.className="alb" -``` - -For Google GKE we need the [`gce` annotation](https://cloud.google.com/kubernetes-engine/docs/how-to/load-balance-ingress#create-ingress). - -```sh -helm install ... --set ingress.class="gce" -``` - -## Note: - -🚨 Our old deployment tool `HAGrid` has been `deprecated`. For the updated deployment options kindly refer to: - -- 📚 Deployments +## PySyft - Getting started 📝 -## Docs and Support +Learn about PySyft via our getting started guide: -- 📚 Docs -- `#support` on Slack +- PySyft from the ground up +- Part 1: Datasets & Assets +- Part 2: Client and Datasite Access +- Part 3: Propose the research study +- Part 4: Review Code Requests +- Part 5: Retrieving Results -# Install Notes +# PySyft In-depth -- PySyft 0.8.6 Requires: 🐍 `python 3.10 - 3.12` - Run: `pip install -U syft` -- Syft Server Requires: 🐳 `docker` or ☸️ `kubernetes` +📚 Check out our docs website. -# Versions +Quick PySyft components links: -`0.9.0` (Beta) - `dev` branch 👈🏽 API - Coming soon... -`0.8.8` (Stable) - API +- DataSite Server -Deprecated: +- Syft Client -- `0.8.7` - API -- `0.8.6` - API -- `0.8.5-post.2` - API -- `0.8.4` - API -- `0.8.3` - API -- `0.8.2` - API -- `0.8.1` - API -- `0.8.0` - API -- `0.7.0` - Course 3 Updated -- `0.6.0` - Course 3 -- `0.5.1` - Course 2 + M1 Hotfix -- `0.2.0` - `0.5.0` +- Datasets API (`.datasets`) -PySyft and Syft Server use the same `version` and its best to match them up where possible. We release weekly betas which can be used in each context: +- Users API (`.users`) -PySyft (Stable): `pip install -U syft` +- Projects API (`.projects`) -PySyft (Beta): `pip install -U syft --pre` +- Request API (`.requests`) -# What is Syft? +- Code API (`.code`) -Syft +- Syft Policies API (`.policy`) -`Syft` is OpenMined's `open source` stack that provides `secure` and `private` Data Science in Python. Syft decouples `private data` from model training, using techniques like [Federated Learning](https://ai.googleblog.com/2017/04/federated-learning-collaborative.html), [Differential Privacy](https://en.wikipedia.org/wiki/Differential_privacy), and [Encrypted Computation](https://en.wikipedia.org/wiki/Homomorphic_encryption). This is done with a `numpy`-like interface and integration with `Deep Learning` frameworks, so that you as a `Data Scientist` can maintain your current workflow while using these new `privacy-enhancing techniques`. +- Settings API (`.settings`) -### Why should I use Syft? +- Notifications API (`.notifications`) -`Syft` allows a `Data Scientist` to ask `questions` about a `dataset` and, within `privacy limits` set by the `data owner`, get `answers` to those `questions`, all without obtaining a `copy` of the data itself. We call this process `Remote Data Science`. It means in a wide variety of `datasites` across society, the current `risks` of sharing information (`copying` data) with someone such as, privacy invasion, IP theft and blackmail will no longer prevent the vast `benefits` such as innovation, insights and scientific discovery which secure access will provide. +- Sync API (`.sync`) -No more cold calls to get `access` to a dataset. No more weeks of `wait times` to get a `result` on your `query`. It also means `1000x more data` in every datasite. PySyft opens the doors to a streamlined Data Scientist `workflow`, all with the individual's `privacy` at its heart. +## Why use PySyft? - +This means that the **current risks** of sharing information with someone will **no longer prevent** the vast benefits such as innovation, insights and scientific discovery. With each Datasite, data owners are able to enable `1000x more accesible data` in each scientific field and lead, together with data scientists, breakthrough innovation. -# Terminology +Learn more about our work on our website. - - - - - - - - - - - - - - - - +- `0.9.1` (Beta) - `dev` branch 👈🏽 +- Install PySyft (Beta): `pip install -U syft --pre` - - -
- -

👨🏻‍💼 Data Owners

-
- -

👩🏽‍🔬 Data Scientists

-
- +## Support -Provide `datasets` which they would like to make available for `study` by an `outside party` they may or may not `fully trust` has good intentions. +For questions about PySyft, reach out via `#support` on Slack. - - +## Syft Versions -Are end `users` who desire to perform `computations` or `answer` a specific `question` using one or more data owners' `datasets`. +:exclamation: PySyft and Syft Server must use the same `version`. -
- -

🏰 Datasite Server

-
- -

🔗 Gateway Server

-
- +**Latest Stable** -Manages the `remote study` of the data by a `Data Scientist` and allows the `Data Owner` to manage the `data` and control the `privacy guarantees` of the subjects under study. It also acts as a `gatekeeper` for the `Data Scientist's` access to the data to compute and experiment with the results. +- `0.9.0` (Stable) - Docs +- Install PySyft (Stable): `pip install -U syft` - - +**Latest Beta** -Provides services to a group of `Data Owners` and `Data Scientists`, such as dataset `search` and bulk `project approval` (legal / technical) to participate in a project. A gateway server acts as a bridge between it's members (`Datasites`) and their subscribers (`Data Scientists`) and can provide access to a collection of `datasites` at once.
+Find more about previous releases here. # Community - - - - - - -
- -
- - - - - -
-
- - - - -
- - +Supported by the OpenMined Foundation, the OpenMined Community is an online network of over 17,000 technologists, researchers, and industry professionals keen to _unlock 1000x more data in every scientific field and industry_. - - -
-
+ # Courses @@ -362,62 +177,65 @@ Provides services to a group of `Data Owners` and `Data Scientists`, such as dat # Contributors -OpenMined and Syft appreciates all contributors, if you would like to fix a bug or suggest a new feature, please see our [guidelines](https://openmined.github.io/PySyft/developer_guide/index.html).
+OpenMined and Syft appreciates all contributors, if you would like to fix a bug or suggest a new feature, please reach out via Github or Slack! + +Contributors + +# About OpenMined -Contributors +OpenMined is a non-profit foundation creating technology infrastructure that helps researchers get answers from data without needing a copy or direct access. Our community of technologists is building Syft. + + # Supporters
- +
- +
- +
- +
- +
- +
- + - + - + - + - + - + - + - + - + - + - +
-# Disclaimer - -Syft is under active development and is not yet ready for pilots on private data without our assistance. As early access participants, please contact us via [Slack](https://slack.openmined.org/) or email if you would like to ask a question or have a use case that you would like to discuss. - # License [Apache License 2.0](LICENSE)
diff --git a/packages/syft/setup.cfg b/packages/syft/setup.cfg index bfc7ded91a4..45bcc0ceeff 100644 --- a/packages/syft/setup.cfg +++ b/packages/syft/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = syft -version = attr: "0.9.0-beta.3" +version = attr: "0.9.1-beta.2" description = Perform numpy-like analysis on data that remains in someone elses server author = OpenMined author_email = info@openmined.org @@ -108,7 +108,7 @@ telemetry = opentelemetry-exporter-jaeger==1.14.0 opentelemetry-instrumentation==0.35b0 opentelemetry-instrumentation-requests==0.35b0 - ; opentelemetry-instrumentation-digma==0.9.0 + ; opentelemetry-instrumentation-digma==0.9.1-beta.2 # pytest>=8.0 broke pytest-lazy-fixture which doesn't seem to be actively maintained # temporarily pin to pytest<8 diff --git a/packages/syft/src/syft/VERSION b/packages/syft/src/syft/VERSION index f74ed6cb9df..8ab9d6fe4f9 100644 --- a/packages/syft/src/syft/VERSION +++ b/packages/syft/src/syft/VERSION @@ -1,5 +1,5 @@ # Mono Repo Global Version -__version__ = "0.9.0-beta.3" +__version__ = "0.9.1-beta.2" # elsewhere we can call this file: `python VERSION` and simply take the stdout # stdlib diff --git a/packages/syft/src/syft/__init__.py b/packages/syft/src/syft/__init__.py index 90defe7e0e1..bf996c85702 100644 --- a/packages/syft/src/syft/__init__.py +++ b/packages/syft/src/syft/__init__.py @@ -1,4 +1,4 @@ -__version__ = "0.9.0-beta.3" +__version__ = "0.9.1-beta.2" # stdlib from collections.abc import Callable @@ -19,8 +19,9 @@ from .client.registry import DatasiteRegistry from .client.registry import EnclaveRegistry from .client.registry import NetworkRegistry -from .client.search import Search -from .client.search import SearchResults + +# from .client.search import Search +# from .client.search import SearchResults from .client.syncing import compare_clients from .client.syncing import compare_states from .client.syncing import sync @@ -147,5 +148,5 @@ def hello_baby() -> None: print("Welcome to the world. \u2764\ufe0f") -def search(name: str) -> SearchResults: - return Search(_datasites()).search(name=name) +# def search(name: str) -> SearchResults: +# return Search(_datasites()).search(name=name) diff --git a/packages/syft/src/syft/client/registry.py b/packages/syft/src/syft/client/registry.py index 48627172eeb..1d06166d1b3 100644 --- a/packages/syft/src/syft/client/registry.py +++ b/packages/syft/src/syft/client/registry.py @@ -18,6 +18,7 @@ from ..service.network.server_peer import ServerPeerConnectionStatus from ..service.response import SyftException from ..types.server_url import ServerURL +from ..types.syft_object import SyftObject from ..util.constants import DEFAULT_TIMEOUT from .client import SyftClient as Client @@ -28,6 +29,10 @@ NETWORK_REGISTRY_REPO = "https://github.com/OpenMined/NetworkRegistry" +DATASITE_REGISTRY_URL = ( + "https://raw.githubusercontent.com/OpenMined/NetworkRegistry/main/datasites.json" +) + def _get_all_networks(network_json: dict, version: str) -> list[dict]: return network_json.get(version, {}).get("gateways", []) @@ -182,7 +187,148 @@ def __getitem__(self, key: str | int) -> Client: raise KeyError(f"Invalid key: {key} for {on}") +class Datasite(SyftObject): + __canonical_name__ = "ServerMetadata" + # __version__ = SYFT_OBJECT_VERSION_1 + + name: str + host_or_ip: str + version: str + protocol: str + admin_email: str + website: str + slack: str + slack_channel: str + + __attr_searchable__ = [ + "name", + "host_or_ip", + "version", + "port", + "admin_email", + "website", + "slack", + "slack_channel", + "protocol", + ] + __attr_unique__ = [ + "name", + "host_or_ip", + "version", + "port", + "admin_email", + "website", + "slack", + "slack_channel", + "protocol", + ] + __repr_attrs__ = [ + "name", + "host_or_ip", + "version", + "port", + "admin_email", + "website", + "slack", + "slack_channel", + "protocol", + ] + __table_sort_attr__ = "name" + + class DatasiteRegistry: + def __init__(self) -> None: + self.all_datasites: list[dict] = [] + try: + response = requests.get(DATASITE_REGISTRY_URL) # nosec + datasites_json = response.json() + self.all_datasites = datasites_json["datasites"] + except Exception as e: + logger.warning( + f"Failed to get Datasite Registry, go checkout: {DATASITE_REGISTRY_URL}. {e}" + ) + + @property + def online_datasites(self) -> list[dict]: + datasites = self.all_datasites + + def check_datasite(datasite: dict) -> dict[Any, Any] | None: + url = "http://" + datasite["host_or_ip"] + ":" + str(datasite["port"]) + "/" + try: + res = requests.get(url, timeout=DEFAULT_TIMEOUT) # nosec + if "status" in res.json(): + online = res.json()["status"] == "ok" + elif "detail" in res.json(): + online = True + except Exception: + online = False + if online: + version = datasite.get("version", None) + # Check if syft version was described in DatasiteRegistry + # If it's unknown, try to update it to an available version. + if not version or version == "unknown": + # If not defined, try to ask in /syft/version endpoint (supported by 0.7.0) + try: + version_url = url + "api/v2/metadata" + res = requests.get(version_url, timeout=DEFAULT_TIMEOUT) # nosec + if res.status_code == 200: + datasite["version"] = res.json()["syft_version"] + else: + datasite["version"] = "unknown" + except Exception: + datasite["version"] = "unknown" + return datasite + return None + + # We can use a with statement to ensure threads are cleaned up promptly + with futures.ThreadPoolExecutor(max_workers=20) as executor: + # map + _online_datasites = list( + executor.map(lambda datasite: check_datasite(datasite), datasites) + ) + + online_datasites = [each for each in _online_datasites if each is not None] + return online_datasites + + def _repr_html_(self) -> str: + on = self.online_datasites + if len(on) == 0: + return "(no gateways online - try syft.gateways.all_networks to see offline gateways)" + + # df = pd.DataFrame(on) + print( + "Add your datasite to this list: https://github.com/OpenMined/NetworkRegistry/" + ) + # return df._repr_html_() # type: ignore + return ([Datasite(**ds) for ds in on])._repr_html_() + + @staticmethod + def create_client(datasite: dict[str, Any]) -> Client: + # relative + from .client import connect + + try: + port = int(datasite["port"]) + protocol = datasite["protocol"] + host_or_ip = datasite["host_or_ip"] + server_url = ServerURL(port=port, protocol=protocol, host_or_ip=host_or_ip) + client = connect(url=str(server_url)) + return client.guest() + except Exception as e: + raise SyftException(f"Failed to login with: {datasite}. {e}") + + def __getitem__(self, key: str | int) -> Client: + if isinstance(key, int): + return self.create_client(datasite=self.online_datasites[key]) + else: + on = self.online_datasites + for datasite in on: + if datasite["name"] == key: + return self.create_client(datasite=datasite) + raise KeyError(f"Invalid key: {key} for {on}") + + +class NetworksOfDatasitesRegistry: def __init__(self) -> None: self.all_networks: list[dict] = [] self.all_datasites: dict[str, ServerPeer] = {} diff --git a/packages/syft/src/syft/client/search.py b/packages/syft/src/syft/client/search.py index e4450987aff..24a6648dc9c 100644 --- a/packages/syft/src/syft/client/search.py +++ b/packages/syft/src/syft/client/search.py @@ -1,17 +1,11 @@ # stdlib -from concurrent.futures import ThreadPoolExecutor # third party -from IPython.display import display # relative from ..service.dataset.dataset import Dataset -from ..service.metadata.server_metadata import ServerMetadataJSON -from ..service.network.network_service import ServerPeer -from ..service.response import SyftWarning from ..types.uid import UID from .client import SyftClient -from .registry import DatasiteRegistry class SearchResults: @@ -57,52 +51,52 @@ def __len__(self) -> int: return len(self._datasets) -class Search: - def __init__(self, datasites: DatasiteRegistry) -> None: - self.datasites: list[tuple[ServerPeer, ServerMetadataJSON | None]] = ( - datasites.online_datasites - ) - - @staticmethod - def __search_one_server( - peer_tuple: tuple[ServerPeer, ServerMetadataJSON], name: str - ) -> tuple[SyftClient | None, list[Dataset]]: - try: - peer, server_metadata = peer_tuple - client = peer.guest_client - results = client.api.services.dataset.search(name=name) - return (client, results) - except Exception as e: # noqa - warning = SyftWarning( - message=f"Got exception {e} at server {server_metadata.name}" - ) - display(warning) - return (None, []) - - def __search(self, name: str) -> list[tuple[SyftClient, list[Dataset]]]: - with ThreadPoolExecutor(max_workers=20) as executor: - # results: list[tuple[SyftClient | None, list[Dataset]]] = [ - # self.__search_one_server(peer_tuple, name) for peer_tuple in self.datasites - # ] - results: list[tuple[SyftClient | None, list[Dataset]]] = list( - executor.map( - lambda peer_tuple: self.__search_one_server(peer_tuple, name), - self.datasites, - ) - ) - # filter out SyftError - filtered = [(client, result) for client, result in results if client and result] - - return filtered - - def search(self, name: str) -> SearchResults: - """ - Searches for a specific dataset by name. - - Args: - name (str): The name of the dataset to search for. - - Returns: - SearchResults: An object containing the search results. - """ - return SearchResults(self.__search(name)) +# class Search: +# def __init__(self, datasites: DatasiteRegistry) -> None: +# self.datasites: list[tuple[ServerPeer, ServerMetadataJSON | None]] = ( +# datasites.online_datasites +# ) + +# @staticmethod +# def __search_one_server( +# peer_tuple: tuple[ServerPeer, ServerMetadataJSON], name: str +# ) -> tuple[SyftClient | None, list[Dataset]]: +# try: +# peer, server_metadata = peer_tuple +# client = peer.guest_client +# results = client.api.services.dataset.search(name=name) +# return (client, results) +# except Exception as e: # noqa +# warning = SyftWarning( +# message=f"Got exception {e} at server {server_metadata.name}" +# ) +# display(warning) +# return (None, []) + +# def __search(self, name: str) -> list[tuple[SyftClient, list[Dataset]]]: +# with ThreadPoolExecutor(max_workers=20) as executor: +# # results: list[tuple[SyftClient | None, list[Dataset]]] = [ +# # self.__search_one_server(peer_tuple, name) for peer_tuple in self.datasites +# # ] +# results: list[tuple[SyftClient | None, list[Dataset]]] = list( +# executor.map( +# lambda peer_tuple: self.__search_one_server(peer_tuple, name), +# self.datasites, +# ) +# ) +# # filter out SyftError +# filtered = [(client, result) for client, result in results if client and result] + +# return filtered + +# def search(self, name: str) -> SearchResults: +# """ +# Searches for a specific dataset by name. + +# Args: +# name (str): The name of the dataset to search for. + +# Returns: +# SearchResults: An object containing the search results. +# """ +# return SearchResults(self.__search(name)) diff --git a/packages/syft/src/syft/protocol/protocol_version.json b/packages/syft/src/syft/protocol/protocol_version.json index 0eb298b0648..49f2dcd4d8e 100644 --- a/packages/syft/src/syft/protocol/protocol_version.json +++ b/packages/syft/src/syft/protocol/protocol_version.json @@ -4,5 +4,8 @@ }, "2": { "release_name": "0.8.8.json" + }, + "3": { + "release_name": "0.9.0.json" } } diff --git a/packages/syft/src/syft/protocol/releases/0.9.0.json b/packages/syft/src/syft/protocol/releases/0.9.0.json new file mode 100644 index 00000000000..3e0585e74fb --- /dev/null +++ b/packages/syft/src/syft/protocol/releases/0.9.0.json @@ -0,0 +1,48 @@ +{ + "3": { + "object_versions": { + "User": { + "2": { + "version": 2, + "hash": "af6fb5b2e1606e97838f4a60f0536ad95db606d455e94acbd1977df866608a2c", + "action": "add" + } + }, + "UserNotificationActivity": { + "1": { + "version": 1, + "hash": "422fd01c6d9af38688a9982abd34e80794a1f6ddd444cca225d77f49189847a9", + "action": "add" + } + }, + "NotifierSettings": { + "2": { + "version": 2, + "hash": "be8b52597fc628d1b7cd22b776ee81416e1adbb04a45188778eb0e32ed1416b4", + "action": "add" + } + }, + "PwdTokenResetConfig": { + "1": { + "version": 1, + "hash": "0415a272428f22add4896c64aa9f29c8c1d35619e2433da6564eb5f1faff39ac", + "action": "add" + } + }, + "ServerSettingsUpdate": { + "3": { + "version": 3, + "hash": "335c7946f2e52d09c7b26f511120cd340717c74c5cca9107e84f839da993c55c", + "action": "add" + } + }, + "ServerSettings": { + "3": { + "version": 3, + "hash": "997667e1cba22d151857aacc2caba6b1ca73c1648adbd03461dc74a0c0c372b3", + "action": "add" + } + } + } + } +} diff --git a/packages/syft/src/syft/service/code/user_code.py b/packages/syft/src/syft/service/code/user_code.py index 4b8c1c8e7ae..1d4615e2e54 100644 --- a/packages/syft/src/syft/service/code/user_code.py +++ b/packages/syft/src/syft/service/code/user_code.py @@ -73,7 +73,6 @@ from ..action.action_object import ActionObject from ..context import AuthedServiceContext from ..dataset.dataset import Asset -from ..dataset.dataset import Dataset from ..job.job_stash import Job from ..output.output_service import ExecutionOutput from ..output.output_service import OutputService @@ -726,17 +725,6 @@ def assets(self) -> DictTuple[str, Asset] | SyftError: if isinstance(api, SyftError): return api - # get all assets on the server - datasets: list[Dataset] = api.services.dataset.get_all() - if isinstance(datasets, SyftError): - return datasets - - all_assets: dict[UID, Asset] = {} - for dataset in datasets: - for asset in dataset.asset_list: - asset._dataset_name = dataset.name - all_assets[asset.action_id] = asset - # get a flat dict of all inputs all_inputs = {} inputs = self.input_policy_init_kwargs or {} @@ -746,8 +734,11 @@ def assets(self) -> DictTuple[str, Asset] | SyftError: # map the action_id to the asset used_assets: list[Asset] = [] for kwarg_name, action_id in all_inputs.items(): - asset = all_assets.get(action_id, None) - if asset: + assets = api.dataset.get_assets_by_action_id(uid=action_id) + if isinstance(assets, SyftError): + return assets + if assets: + asset = assets[0] asset._kwarg_name = kwarg_name used_assets.append(asset) diff --git a/packages/syft/src/syft/service/notification/email_templates.py b/packages/syft/src/syft/service/notification/email_templates.py index f8baceee38a..fec2810b02a 100644 --- a/packages/syft/src/syft/service/notification/email_templates.py +++ b/packages/syft/src/syft/service/notification/email_templates.py @@ -1,4 +1,5 @@ # stdlib +from datetime import datetime from typing import TYPE_CHECKING from typing import cast @@ -22,6 +23,91 @@ def email_body(notification: "Notification", context: AuthedServiceContext) -> s return "" +@serializable(canonical_name="PasswordResetTemplate", version=1) +class PasswordResetTemplate(EmailTemplate): + @staticmethod + def email_title(notification: "Notification", context: AuthedServiceContext) -> str: + return "Password Reset Requested" + + @staticmethod + def email_body(notification: "Notification", context: AuthedServiceContext) -> str: + user_service = context.server.get_service("userservice") + + user = user_service.get_by_verify_key(notification.to_user_verify_key) + if not user: + raise Exception("User not found!") + + user.reset_token = user_service.generate_new_password_reset_token( + context.server.settings.pwd_token_config + ) + user.reset_token_date = datetime.now() + + result = user_service.stash.update( + credentials=context.credentials, user=user, has_permission=True + ) + if result.is_err(): + raise Exception("Couldn't update the user password") + + head = """ + + """ + body = f""" +
+

Password Reset

+

We received a request to reset your password. Your new temporary token is:

+

{user.reset_token}

+

Use + + syft_client.reset_password(token='{user.reset_token}', new_password=*****) + . + to reset your password.

+

If you didn't request a password reset, please ignore this email.

+
+ """ + return f"""{head} {body}""" + + @serializable(canonical_name="OnboardEmailTemplate", version=1) class OnBoardEmailTemplate(EmailTemplate): @staticmethod diff --git a/packages/syft/src/syft/service/notifier/notifier.py b/packages/syft/src/syft/service/notifier/notifier.py index 6cc24a3d720..1f5e34cf1c3 100644 --- a/packages/syft/src/syft/service/notifier/notifier.py +++ b/packages/syft/src/syft/service/notifier/notifier.py @@ -6,11 +6,13 @@ # 2) .....settings().x_enabled # 2) .....user_settings().x - # stdlib +from collections.abc import Callable +from datetime import datetime from typing import TypeVar # third party +from pydantic import BaseModel from result import Err from result import Ok from result import Result @@ -18,8 +20,12 @@ # relative from ...serde.serializable import serializable from ...server.credentials import SyftVerifyKey +from ...types.syft_migration import migrate from ...types.syft_object import SYFT_OBJECT_VERSION_1 +from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SyftObject +from ...types.transforms import drop +from ...types.transforms import make_set_default from ..context import AuthedServiceContext from ..notification.notifications import Notification from ..response import SyftError @@ -28,7 +34,7 @@ from .smtp_client import SMTPClient -class BaseNotifier: +class BaseNotifier(BaseModel): def send( self, target: SyftVerifyKey, notification: Notification ) -> SyftSuccess | SyftError: @@ -38,10 +44,18 @@ def send( TBaseNotifier = TypeVar("TBaseNotifier", bound=BaseNotifier) +@serializable() +class UserNotificationActivity(SyftObject): + __canonical_name__ = "UserNotificationActivity" + __version__ = SYFT_OBJECT_VERSION_1 + count: int = 1 + date: datetime = datetime.now() + + @serializable(canonical_name="EmailNotifier", version=1) class EmailNotifier(BaseNotifier): smtp_client: SMTPClient - sender = "" + sender: str = "" def __init__( self, @@ -131,7 +145,7 @@ class NotificationPreferences(SyftObject): @serializable() -class NotifierSettings(SyftObject): +class NotifierSettingsV1(SyftObject): __canonical_name__ = "NotifierSettings" __version__ = SYFT_OBJECT_VERSION_1 __repr_attrs__ = [ @@ -139,6 +153,34 @@ class NotifierSettings(SyftObject): "email_enabled", ] active: bool = False + + notifiers: dict[NOTIFIERS, type[TBaseNotifier]] = { + NOTIFIERS.EMAIL: EmailNotifier, + } + + notifiers_status: dict[NOTIFIERS, bool] = { + NOTIFIERS.EMAIL: True, + NOTIFIERS.SMS: False, + NOTIFIERS.SLACK: False, + NOTIFIERS.APP: False, + } + + email_sender: str | None = "" + email_server: str | None = "" + email_port: int | None = 587 + email_username: str | None = "" + email_password: str | None = "" + + +@serializable() +class NotifierSettings(SyftObject): + __canonical_name__ = "NotifierSettings" + __version__ = SYFT_OBJECT_VERSION_2 + __repr_attrs__ = [ + "active", + "email_enabled", + ] + active: bool = False # Flag to identify which notification is enabled # For now, consider only the email notification # In future, Admin, must be able to have a better @@ -161,6 +203,9 @@ class NotifierSettings(SyftObject): email_username: str | None = "" email_password: str | None = "" + email_activity: dict[str, dict[SyftVerifyKey, UserNotificationActivity]] = {} + email_rate_limit: dict[str, int] = {} + @property def email_enabled(self) -> bool: return self.notifiers_status[NOTIFIERS.EMAIL] @@ -237,3 +282,17 @@ def select_notifiers(self, notification: Notification) -> list[BaseNotifier]: notifier_objs.append(self.notifiers[notifier_type]()) # type: ignore[misc] return notifier_objs + + +@migrate(NotifierSettingsV1, NotifierSettings) +def migrate_server_settings_v1_to_current() -> list[Callable]: + return [ + make_set_default("email_activity", {}), + make_set_default("email_rate_limit", {}), + ] + + +@migrate(NotifierSettings, NotifierSettingsV1) +def migrate_server_settings_v2_to_v1() -> list[Callable]: + # Use drop function on "notifications_enabled" attrubute + return [drop(["email_activity"]), drop(["email_rate_limit"])] diff --git a/packages/syft/src/syft/service/notifier/notifier_enums.py b/packages/syft/src/syft/service/notifier/notifier_enums.py index 023843f7d6c..f8c2d887ff4 100644 --- a/packages/syft/src/syft/service/notifier/notifier_enums.py +++ b/packages/syft/src/syft/service/notifier/notifier_enums.py @@ -6,6 +6,14 @@ from ...serde.serializable import serializable +@serializable(canonical_name="EMAIL_TYPES", version=1) +class EMAIL_TYPES(Enum): + PASSWORD_RESET_EMAIL = "PasswordResetTemplate" # nosec + ONBOARD_EMAIL = "OnBoardEmailTemplate" + REQUEST_EMAIL = "RequestEmailTemplate" + REQUEST_UPDATE_EMAIL = "RequestUpdateEmailTemplate" + + @serializable(canonical_name="NOTIFIERS", version=1) class NOTIFIERS(Enum): EMAIL = auto() diff --git a/packages/syft/src/syft/service/notifier/notifier_service.py b/packages/syft/src/syft/service/notifier/notifier_service.py index 391bf16abd4..c8c09ba3d50 100644 --- a/packages/syft/src/syft/service/notifier/notifier_service.py +++ b/packages/syft/src/syft/service/notifier/notifier_service.py @@ -1,4 +1,5 @@ # stdlib +from datetime import datetime import logging import traceback @@ -13,12 +14,15 @@ from ...serde.serializable import serializable from ...store.document_store import DocumentStore from ..context import AuthedServiceContext +from ..notification.email_templates import PasswordResetTemplate from ..notification.notifications import Notification from ..response import SyftError from ..response import SyftSuccess from ..service import AbstractService from .notifier import NotificationPreferences from .notifier import NotifierSettings +from .notifier import UserNotificationActivity +from .notifier_enums import EMAIL_TYPES from .notifier_enums import NOTIFIERS from .notifier_stash import NotifierStash @@ -188,6 +192,10 @@ def turn_on( message="You must provide a sender email address to enable notifications." ) + # If email_rate_limit isn't defined yet. + if not notifier.email_rate_limit: + notifier.email_rate_limit = {PasswordResetTemplate.__name__: 3} + if email_sender: try: EmailStr._validate(email_sender) @@ -320,6 +328,8 @@ def init_notifier( notifier.email_sender = email_sender notifier.email_server = smtp_host notifier.email_port = smtp_port + # Default daily email rate limit per user + notifier.email_rate_limit = {PasswordResetTemplate.__name__: 3} notifier.active = True notifier_stash.set(server.signing_key.verify_key, notifier) @@ -328,6 +338,22 @@ def init_notifier( except Exception: raise Exception(f"Error initializing notifier. \n {traceback.format_exc()}") + def set_email_rate_limit( + self, context: AuthedServiceContext, email_type: EMAIL_TYPES, daily_limit: int + ) -> SyftSuccess | SyftError: + notifier = self.stash.get(context.credentials) + if notifier.is_err(): + return SyftError(message="Couldn't set the email rate limit.") + + notifier = notifier.ok() + + notifier.email_rate_limit[email_type.value] = daily_limit + result = self.stash.update(credentials=context.credentials, settings=notifier) + if result.is_err(): + return SyftError(message="Couldn't update the notifier.") + + return SyftSuccess(message="Email rate limit updated!") + # This is not a public API. # This method is used by other services to dispatch notifications internally def dispatch_notification( @@ -343,7 +369,51 @@ def dispatch_notification( notifier = notifier.ok() # If notifier is active - if notifier.active: + if notifier.active and notification.email_template is not None: + logging.debug("Checking user email activity") + if notifier.email_activity.get(notification.email_template.__name__, None): + user_activity = notifier.email_activity[ + notification.email_template.__name__ + ].get(notification.to_user_verify_key, None) + # If there's no user activity + if user_activity is None: + notifier.email_activity[notification.email_template.__name__][ + notification.to_user_verify_key, None + ] = UserNotificationActivity(count=1, date=datetime.now()) + else: # If there's a previous user activity + current_state: UserNotificationActivity = notifier.email_activity[ + notification.email_template.__name__ + ][notification.to_user_verify_key] + date_refresh = abs(datetime.now() - current_state.date).days > 1 + + limit = notifier.email_rate_limit.get( + notification.email_template.__name__, 0 + ) + still_in_limit = current_state.count < limit + # Time interval reseted. + if date_refresh: + current_state.count = 1 + current_state.date = datetime.now() + # Time interval didn't reset yet. + elif still_in_limit or not limit: + current_state.count += 1 + current_state.date = datetime.now() + else: + return SyftError( + message="Couldn't send the email. You have surpassed the" + + " email threshold limit. Please try again later." + ) + else: + notifier.email_activity[notification.email_template.__name__] = { + notification.to_user_verify_key: UserNotificationActivity( + count=1, date=datetime.now() + ) + } + + result = self.stash.update(credentials=admin_key, settings=notifier) + if result.is_err(): + return SyftError(message="Couldn't update the notifier.") + resp = notifier.send_notifications( context=context, notification=notification ) diff --git a/packages/syft/src/syft/service/notifier/smtp_client.py b/packages/syft/src/syft/service/notifier/smtp_client.py index 1f4df6531e5..f7041c9f722 100644 --- a/packages/syft/src/syft/service/notifier/smtp_client.py +++ b/packages/syft/src/syft/service/notifier/smtp_client.py @@ -4,13 +4,14 @@ import smtplib # third party +from pydantic import BaseModel from result import Err from result import Ok from result import Result -class SMTPClient: - SOCKET_TIMEOUT = 5 # seconds +class SMTPClient(BaseModel): + SOCKET_TIMEOUT: int = 5 # seconds def __init__( self, diff --git a/packages/syft/src/syft/service/settings/settings.py b/packages/syft/src/syft/service/settings/settings.py index bf3369d7cb9..67720658c80 100644 --- a/packages/syft/src/syft/service/settings/settings.py +++ b/packages/syft/src/syft/service/settings/settings.py @@ -3,6 +3,11 @@ import logging from typing import Any +# third party +from pydantic import field_validator +from pydantic import model_validator +from typing_extensions import Self + # relative from ...abstract_server import ServerSideType from ...abstract_server import ServerType @@ -13,6 +18,7 @@ from ...types.syft_object import PartialSyftObject from ...types.syft_object import SYFT_OBJECT_VERSION_1 from ...types.syft_object import SYFT_OBJECT_VERSION_2 +from ...types.syft_object import SYFT_OBJECT_VERSION_3 from ...types.syft_object import SyftObject from ...types.transforms import drop from ...types.transforms import make_set_default @@ -26,6 +32,32 @@ logger = logging.getLogger(__name__) +@serializable() +class PwdTokenResetConfig(SyftObject): + __canonical_name__ = "PwdTokenResetConfig" + __version__ = SYFT_OBJECT_VERSION_1 + ascii: bool = True + numbers: bool = True + token_len: int = 12 + token_exp_min: int = 30 + + @model_validator(mode="after") + def validate_char_types(self) -> Self: + if not self.ascii and not self.numbers: + raise ValueError( + "Invalid config, at least one of the ascii/number options must be true." + ) + + return self + + @field_validator("token_len") + @classmethod + def check_token_len(cls, value: int) -> int: + if value < 4: + raise ValueError("Token length must be greater than 4.") + return value + + @serializable() class ServerSettingsUpdateV1(PartialSyftObject): __canonical_name__ = "ServerSettingsUpdate" @@ -43,7 +75,7 @@ class ServerSettingsUpdateV1(PartialSyftObject): @serializable() -class ServerSettingsUpdate(PartialSyftObject): +class ServerSettingsUpdateV2(PartialSyftObject): __canonical_name__ = "ServerSettingsUpdate" __version__ = SYFT_OBJECT_VERSION_2 id: UID @@ -59,6 +91,24 @@ class ServerSettingsUpdate(PartialSyftObject): notifications_enabled: bool +@serializable() +class ServerSettingsUpdate(PartialSyftObject): + __canonical_name__ = "ServerSettingsUpdate" + __version__ = SYFT_OBJECT_VERSION_3 + id: UID + name: str + organization: str + description: str + on_board: bool + signup_enabled: bool + admin_email: str + association_request_auto_approval: bool + welcome_markdown: HTMLObject | MarkdownDescription + eager_execution_enabled: bool + notifications_enabled: bool + pwd_token_config: PwdTokenResetConfig + + @serializable() class ServerSettingsV1(SyftObject): __canonical_name__ = "ServerSettings" @@ -93,7 +143,7 @@ class ServerSettingsV1(SyftObject): @serializable() -class ServerSettings(SyftObject): +class ServerSettingsV2(SyftObject): __canonical_name__ = "ServerSettings" __version__ = SYFT_OBJECT_VERSION_2 __repr_attrs__ = [ @@ -125,6 +175,41 @@ class ServerSettings(SyftObject): ) notifications_enabled: bool + +@serializable() +class ServerSettings(SyftObject): + __canonical_name__ = "ServerSettings" + __version__ = SYFT_OBJECT_VERSION_3 + __repr_attrs__ = [ + "name", + "organization", + "description", + "deployed_on", + "signup_enabled", + "admin_email", + ] + + id: UID + name: str = "Server" + deployed_on: str + organization: str = "OpenMined" + verify_key: SyftVerifyKey + on_board: bool = True + description: str = "This is the default description for a Datasite Server." + server_type: ServerType = ServerType.DATASITE + signup_enabled: bool + admin_email: str + server_side_type: ServerSideType = ServerSideType.HIGH_SIDE + show_warnings: bool + association_request_auto_approval: bool + eager_execution_enabled: bool = False + default_worker_pool: str = DEFAULT_WORKER_POOL_NAME + welcome_markdown: HTMLObject | MarkdownDescription = HTMLObject( + text=DEFAULT_WELCOME_MSG + ) + notifications_enabled: bool + pwd_token_config: PwdTokenResetConfig = PwdTokenResetConfig() + def _repr_html_(self) -> Any: # .api.services.notifications.settings() is how the server itself would dispatch notifications. # .api.services.notifications.user_settings() sets if a specific user wants or not to receive notifications. @@ -176,22 +261,53 @@ def _repr_html_(self) -> Any: """ -@migrate(ServerSettingsV1, ServerSettings) +# Server Settings Migration + + +# set +@migrate(ServerSettingsV1, ServerSettingsV2) def migrate_server_settings_v1_to_v2() -> list[Callable]: return [make_set_default("notifications_enabled", False)] -@migrate(ServerSettings, ServerSettingsV1) +@migrate(ServerSettingsV2, ServerSettings) +def migrate_server_settings_v2_to_current() -> list[Callable]: + return [make_set_default("pwd_token_config", PwdTokenResetConfig())] + + +# drop +@migrate(ServerSettingsV2, ServerSettingsV1) def migrate_server_settings_v2_to_v1() -> list[Callable]: # Use drop function on "notifications_enabled" attrubute return [drop(["notifications_enabled"])] -@migrate(ServerSettingsUpdateV1, ServerSettingsUpdate) +@migrate(ServerSettings, ServerSettingsV2) +def migrate_server_settings_current_to_v2() -> list[Callable]: + # Use drop function on "notifications_enabled" attrubute + return [drop(["pwd_token_config"])] + + +# Server Settings Update Migration + + +# set +@migrate(ServerSettingsUpdateV1, ServerSettingsUpdateV2) def migrate_server_settings_update_v1_to_v2() -> list[Callable]: return [make_set_default("notifications_enabled", False)] -@migrate(ServerSettingsUpdate, ServerSettingsUpdateV1) +@migrate(ServerSettingsUpdateV2, ServerSettingsUpdate) +def migrate_server_settings_update_v2_to_current() -> list[Callable]: + return [make_set_default("pwd_token_config", PwdTokenResetConfig())] + + +# drop +@migrate(ServerSettingsUpdateV2, ServerSettingsUpdateV1) def migrate_server_settings_update_v2_to_v1() -> list[Callable]: return [drop(["notifications_enabled"])] + + +@migrate(ServerSettingsUpdate, ServerSettingsUpdateV2) +def migrate_server_settings_update_current_to_v2() -> list[Callable]: + return [drop(["pwd_token_config"])] diff --git a/packages/syft/src/syft/service/settings/settings_service.py b/packages/syft/src/syft/service/settings/settings_service.py index 17d54df1dc0..b54abacd078 100644 --- a/packages/syft/src/syft/service/settings/settings_service.py +++ b/packages/syft/src/syft/service/settings/settings_service.py @@ -20,6 +20,7 @@ from ...util.schema import GUEST_COMMANDS from ..context import AuthedServiceContext from ..context import UnauthedServiceContext +from ..notifier.notifier_enums import EMAIL_TYPES from ..response import SyftError from ..response import SyftSuccess from ..service import AbstractService @@ -68,7 +69,12 @@ def set( else: return SyftError(message=result.err()) - @service_method(path="settings.update", name="update", autosplat=["settings"]) + @service_method( + path="settings.update", + name="update", + autosplat=["settings"], + roles=ADMIN_ROLE_LEVEL, + ) def update( self, context: AuthedServiceContext, settings: ServerSettingsUpdate ) -> Result[SyftSuccess, SyftError]: @@ -254,6 +260,13 @@ def enable_eager_execution( message = "enabled" if enable else "disabled" return SyftSuccess(message=f"Eager execution {message}") + @service_method(path="settings.set_email_rate_limit", name="set_email_rate_limit") + def set_email_rate_limit( + self, context: AuthedServiceContext, email_type: EMAIL_TYPES, daily_limit: int + ) -> SyftSuccess | SyftError: + notifier_service = context.server.get_service("notifierservice") + return notifier_service.set_email_rate_limit(context, email_type, daily_limit) + @service_method( path="settings.allow_association_request_auto_approval", name="allow_association_request_auto_approval", diff --git a/packages/syft/src/syft/service/user/user.py b/packages/syft/src/syft/service/user/user.py index 383ed3c5f6a..cb9b8860c03 100644 --- a/packages/syft/src/syft/service/user/user.py +++ b/packages/syft/src/syft/service/user/user.py @@ -1,6 +1,8 @@ # stdlib from collections.abc import Callable +from datetime import datetime from getpass import getpass +import re from typing import Any # third party @@ -17,8 +19,10 @@ from ...server.credentials import SyftSigningKey from ...server.credentials import SyftVerifyKey from ...types.syft_metaclass import Empty +from ...types.syft_migration import migrate from ...types.syft_object import PartialSyftObject from ...types.syft_object import SYFT_OBJECT_VERSION_1 +from ...types.syft_object import SYFT_OBJECT_VERSION_2 from ...types.syft_object import SyftObject from ...types.transforms import TransformContext from ...types.transforms import drop @@ -35,7 +39,7 @@ @serializable() -class User(SyftObject): +class UserV1(SyftObject): # version __canonical_name__ = "User" __version__ = SYFT_OBJECT_VERSION_1 @@ -68,10 +72,77 @@ class User(SyftObject): __repr_attrs__ = ["name", "email"] +@serializable() +class User(SyftObject): + # version + __canonical_name__ = "User" + __version__ = SYFT_OBJECT_VERSION_2 + + id: UID | None = None # type: ignore[assignment] + + # fields + notifications_enabled: dict[NOTIFIERS, bool] = { + NOTIFIERS.EMAIL: True, + NOTIFIERS.SMS: False, + NOTIFIERS.SLACK: False, + NOTIFIERS.APP: False, + } + email: EmailStr | None = None + name: str | None = None + hashed_password: str | None = None + salt: str | None = None + signing_key: SyftSigningKey | None = None + verify_key: SyftVerifyKey | None = None + role: ServiceRole | None = None + institution: str | None = None + website: str | None = None + created_at: str | None = None + # TODO where do we put this flag? + mock_execution_permission: bool = False + reset_token: str | None = None + reset_token_date: datetime | None = None + # serde / storage rules + __attr_searchable__ = ["name", "email", "verify_key", "role", "reset_token"] + __attr_unique__ = ["email", "signing_key", "verify_key"] + __repr_attrs__ = ["name", "email"] + + +@migrate(UserV1, User) +def migrate_server_user_update_v1_current() -> list[Callable]: + return [ + make_set_default("reset_token", None), + make_set_default("reset_token_date", None), + drop("__attr_searchable__"), + make_set_default( + "__attr_searchable__", + ["name", "email", "verify_key", "role", "reset_token"], + ), + ] + + +@migrate(User, UserV1) +def migrate_server_user_downgrade_current_v1() -> list[Callable]: + return [ + drop("reset_token"), + drop("reset_token_date"), + drop("__attr_searchable__"), + make_set_default( + "__attr_searchable__", ["name", "email", "verify_key", "role"] + ), + ] + + def default_role(role: ServiceRole) -> Callable: return make_set_default(key="role", value=role) +def validate_password(password: str) -> bool: + # Define the regex pattern for the password + pattern = re.compile(r"^(?=.*[a-z])(?=.*[A-Z])(?=.*\d).{8,}$") + + return bool(pattern.match(password)) + + def hash_password(context: TransformContext) -> TransformContext: if context.output is None: return context @@ -325,6 +396,24 @@ def user_create_to_user() -> list[Callable]: ] +@transform(UserV1, UserView) +def userv1_to_view_user() -> list[Callable]: + return [ + keep( + [ + "id", + "email", + "name", + "role", + "institution", + "website", + "mock_execution_permission", + "notifications_enabled", + ] + ) + ] + + @transform(User, UserView) def user_to_view_user() -> list[Callable]: return [ @@ -353,6 +442,11 @@ class UserPrivateKey(SyftObject): role: ServiceRole +@transform(UserV1, UserPrivateKey) +def userv1_to_user_verify() -> list[Callable]: + return [keep(["email", "signing_key", "id", "role"])] + + @transform(User, UserPrivateKey) def user_to_user_verify() -> list[Callable]: return [keep(["email", "signing_key", "id", "role"])] diff --git a/packages/syft/src/syft/service/user/user_service.py b/packages/syft/src/syft/service/user/user_service.py index 584ed6cce48..b37cf069f87 100644 --- a/packages/syft/src/syft/service/user/user_service.py +++ b/packages/syft/src/syft/service/user/user_service.py @@ -1,4 +1,8 @@ # stdlib +from datetime import datetime +from datetime import timedelta +import secrets +import string # relative from ...abstract_server import ServerType @@ -17,6 +21,7 @@ from ..context import ServerServiceContext from ..context import UnauthedServiceContext from ..notification.email_templates import OnBoardEmailTemplate +from ..notification.email_templates import PasswordResetTemplate from ..notification.notification_service import CreateNotification from ..notification.notification_service import NotificationService from ..notifier.notifier_enums import NOTIFIERS @@ -26,6 +31,7 @@ from ..service import SERVICE_TO_TYPES from ..service import TYPE_TO_SERVICE from ..service import service_method +from ..settings.settings import PwdTokenResetConfig from ..settings.settings_stash import SettingsStash from .user import User from .user import UserCreate @@ -36,6 +42,7 @@ from .user import UserViewPage from .user import check_pwd from .user import salt_and_hash_password +from .user import validate_password from .user_roles import ADMIN_ROLE_LEVEL from .user_roles import DATA_OWNER_ROLE_LEVEL from .user_roles import DATA_SCIENTIST_ROLE_LEVEL @@ -84,6 +91,206 @@ def create( user = result.ok() return user.to(UserView) + @service_method( + path="user.forgot_password", name="forgot_password", roles=GUEST_ROLE_LEVEL + ) + def forgot_password( + self, context: AuthedServiceContext, email: str + ) -> SyftSuccess | SyftError: + success_msg = ( + "If the email is valid, we sent a password " + + "reset token to your email or a password request to the admin." + ) + result = self.stash.get_by_email(credentials=context.credentials, email=email) + # Isn't a valid email + if result.is_err(): + return SyftSuccess(message=success_msg) + user = result.ok() + + user_role = self.get_role_for_credentials(user.verify_key) + if user_role == ServiceRole.ADMIN: + return SyftError( + message="You can't request password reset for an Admin user." + ) + + # Email is valid + # Notifications disabled + # We should just sent a notification to the admin/user about password reset + # Notifications Enabled + # Instead of changing the password here, we would change it in email template generation. + root_key = self.admin_verify_key() + root_context = AuthedServiceContext(server=context.server, credentials=root_key) + link = LinkedObject.with_context(user, context=root_context) + notifier_service = context.server.get_service("notifierservice") + # Notifier is active + notifier = notifier_service.settings(context=root_context) + notification_is_enabled = notifier.active + # Email is enabled + email_is_enabled = notifier.email_enabled + # User Preferences allow email notification + user_allow_email_notifications = user.notifications_enabled[NOTIFIERS.EMAIL] + + # This checks if the user will safely receive the email reset. + not_receive_emails = ( + not notification_is_enabled + or not email_is_enabled + or not user_allow_email_notifications + ) + + # If notifier service is not enabled. + if not_receive_emails: + message = CreateNotification( + subject="You requested password reset.", + from_user_verify_key=root_key, + to_user_verify_key=user.verify_key, + linked_obj=link, + ) + + method = context.server.get_service_method(NotificationService.send) + result = method(context=root_context, notification=message) + + message = CreateNotification( + subject="User requested password reset.", + from_user_verify_key=user.verify_key, + to_user_verify_key=root_key, + linked_obj=link, + ) + + result = method(context=root_context, notification=message) + if isinstance(result, SyftError): + return result + else: + # Email notification is Enabled + # Therefore, we can directly send a message to the + # user with its new password. + message = CreateNotification( + subject="You requested a password reset.", + from_user_verify_key=root_key, + to_user_verify_key=user.verify_key, + linked_obj=link, + notifier_types=[NOTIFIERS.EMAIL], + email_template=PasswordResetTemplate, + ) + + method = context.server.get_service_method(NotificationService.send) + result = method(context=root_context, notification=message) + if isinstance(result, SyftError): + return result + + return SyftSuccess(message=success_msg) + + @service_method( + path="user.request_password_reset", + name="request_password_reset", + roles=ADMIN_ROLE_LEVEL, + ) + def request_password_reset( + self, context: AuthedServiceContext, uid: UID + ) -> str | SyftError: + result = self.stash.get_by_uid(credentials=context.credentials, uid=uid) + if result.is_err(): + return SyftError( + message=( + f"Failed to retrieve user with UID: {uid}. Error: {str(result.err())}" + ) + ) + user = result.ok() + if user is None: + return SyftError(message=f"No user exists for given: {uid}") + + user_role = self.get_role_for_credentials(user.verify_key) + if user_role == ServiceRole.ADMIN: + return SyftError( + message="You can't request password reset for an Admin user." + ) + + user.reset_token = self.generate_new_password_reset_token( + context.server.settings.pwd_token_config + ) + user.reset_token_date = datetime.now() + + result = self.stash.update( + credentials=context.credentials, user=user, has_permission=True + ) + if result.is_err(): + return SyftError( + message=( + f"Failed to update user with UID: {uid}. Error: {str(result.err())}" + ) + ) + + return user.reset_token + + @service_method( + path="user.reset_password", name="reset_password", roles=GUEST_ROLE_LEVEL + ) + def reset_password( + self, context: AuthedServiceContext, token: str, new_password: str + ) -> SyftSuccess | SyftError: + """Resets a certain user password using a temporary token.""" + result = self.stash.get_by_reset_token( + credentials=context.credentials, token=token + ) + invalid_token_error = SyftError( + message=("Failed to reset user password. Token is invalid or expired!") + ) + + if result.is_err(): + return SyftError(message="Failed to reset user password.") + + user = result.ok() + + # If token isn't found + if user is None: + return invalid_token_error + + now = datetime.now() + time_difference = now - user.reset_token_date + + # If token expired + expiration_time = context.server.settings.pwd_token_config.token_exp_min + if time_difference > timedelta(minutes=expiration_time): + return invalid_token_error + + if not validate_password(new_password): + return SyftError( + message="Your new password must have at least 8 \ + characters, Upper case and lower case characters\ + and at least one number." + ) + + salt, hashed = salt_and_hash_password(new_password, 12) + user.hashed_password = hashed + user.salt = salt + + user.reset_token = None + user.reset_token_date = None + + result = self.stash.update( + credentials=context.credentials, user=user, has_permission=True + ) + if result.is_err(): + return SyftError( + message=(f"Failed to update user password. Error: {str(result.err())}") + ) + return SyftSuccess(message="User Password updated successfully!") + + def generate_new_password_reset_token( + self, token_config: PwdTokenResetConfig + ) -> str: + valid_characters = "" + if token_config.ascii: + valid_characters += string.ascii_letters + + if token_config.numbers: + valid_characters += string.digits + + generated_token = "".join( + secrets.choice(valid_characters) for _ in range(token_config.token_len) + ) + + return generated_token + @service_method(path="user.view", name="view", roles=DATA_SCIENTIST_ROLE_LEVEL) def view( self, context: AuthedServiceContext, uid: UID @@ -285,6 +492,20 @@ def update( # Get user to be updated by its UID result = self.stash.get_by_uid(credentials=context.credentials, uid=uid) + immutable_fields = {"created_date", "updated_date", "deleted_date"} + updated_fields = user_update.to_dict( + exclude_none=True, exclude_empty=True + ).keys() + + for field_name in immutable_fields: + if field_name in updated_fields: + return SyftError( + message=f"You are not allowed to modify '{field_name}'." + ) + + if user_update.name is not Empty and user_update.name.strip() == "": # type: ignore[comparison-overlap] + return SyftError(message="Name can't be an empty string.") + # check if the email already exists (with root's key) if user_update.email is not Empty: user_with_email_exists: bool = self.stash.email_exists( diff --git a/packages/syft/src/syft/service/user/user_stash.py b/packages/syft/src/syft/service/user/user_stash.py index 2b2b42db9e8..894d9a65115 100644 --- a/packages/syft/src/syft/service/user/user_stash.py +++ b/packages/syft/src/syft/service/user/user_stash.py @@ -23,6 +23,7 @@ # 🟡 TODO 27: it would be nice if these could be defined closer to the User EmailPartitionKey = PartitionKey(key="email", type_=str) +PasswordResetTokenPartitionKey = PartitionKey(key="reset_token", type_=str) RolePartitionKey = PartitionKey(key="role", type_=ServiceRole) SigningKeyPartitionKey = PartitionKey(key="signing_key", type_=SyftSigningKey) VerifyKeyPartitionKey = PartitionKey(key="verify_key", type_=SyftVerifyKey) @@ -74,6 +75,12 @@ def get_by_uid( qks = QueryKeys(qks=[UIDPartitionKey.with_obj(uid)]) return self.query_one(credentials=credentials, qks=qks) + def get_by_reset_token( + self, credentials: SyftVerifyKey, token: str + ) -> Result[User | None, str]: + qks = QueryKeys(qks=[PasswordResetTokenPartitionKey.with_obj(token)]) + return self.query_one(credentials=credentials, qks=qks) + def get_by_email( self, credentials: SyftVerifyKey, email: str ) -> Result[User | None, str]: diff --git a/packages/syft/src/syft/stable_version.py b/packages/syft/src/syft/stable_version.py index c0f11e1478b..85f41d02b4a 100644 --- a/packages/syft/src/syft/stable_version.py +++ b/packages/syft/src/syft/stable_version.py @@ -1 +1 @@ -LATEST_STABLE_SYFT = "0.8.8" +LATEST_STABLE_SYFT = "0.9.0" diff --git a/packages/syft/src/syft/types/syft_object_registry.py b/packages/syft/src/syft/types/syft_object_registry.py index d5cc342635e..3d0548f6cf1 100644 --- a/packages/syft/src/syft/types/syft_object_registry.py +++ b/packages/syft/src/syft/types/syft_object_registry.py @@ -131,7 +131,10 @@ def get_transform( klass_from = type_from_mro.__name__ version_from = None for type_to_mro in type_to.mro(): - if issubclass(type_to_mro, SyftBaseObject): + if ( + issubclass(type_to_mro, SyftBaseObject) + and type_to_mro != SyftBaseObject + ): klass_to = type_to_mro.__canonical_name__ version_to = type_to_mro.__version__ else: diff --git a/packages/syftcli/manifest.yml b/packages/syftcli/manifest.yml index b170a80519d..3f0d73dd3b7 100644 --- a/packages/syftcli/manifest.yml +++ b/packages/syftcli/manifest.yml @@ -1,11 +1,11 @@ manifestVersion: 1.0 -syftVersion: 0.9.0-beta.3 -dockerTag: 0.9.0-beta.3 +syftVersion: 0.9.1-beta.2 +dockerTag: 0.9.1-beta.2 images: - - docker.io/openmined/syft-frontend:0.9.0-beta.3 - - docker.io/openmined/syft-backend:0.9.0-beta.3 + - docker.io/openmined/syft-frontend:0.9.1-beta.2 + - docker.io/openmined/syft-backend:0.9.1-beta.2 - docker.io/library/mongo:7.0.4 - docker.io/traefik:v2.11.0 diff --git a/releases.md b/releases.md new file mode 100644 index 00000000000..a21919cf21f --- /dev/null +++ b/releases.md @@ -0,0 +1,29 @@ +# Releases + +:exclamation: PySyft and Syft Server must use the same `version`. + +### Latest Stable + +- `0.9.0` (Stable) - Docs +- Install PySyft (Stable): `pip install -U syft` + +### Latest Beta + +- `0.9.1` (Beta) - `dev` branch 👈🏽 +- Install PySyft (Beta): `pip install -U syft --pre` + +**Deprecated**: + +- `0.8.8` - API +- `0.8.7` - API +- `0.8.6` - API +- `0.8.5-post.2` - API +- `0.8.4` - API +- `0.8.3` - API +- `0.8.2` - API +- `0.8.1` - API +- `0.8.0` - API +- `0.7.0` - Course 3 Updated +- `0.6.0` - Course 3 +- `0.5.1` - Course 2 + M1 Hotfix +- `0.2.0` - `0.5.0` diff --git a/tests/integration/local/gateway_local_test.py b/tests/integration/local/gateway_local_test.py index e10e9cb1540..909bb3dd598 100644 --- a/tests/integration/local/gateway_local_test.py +++ b/tests/integration/local/gateway_local_test.py @@ -15,7 +15,6 @@ from syft.client.gateway_client import GatewayClient from syft.service.network.network_service import ServerPeerAssociationStatus from syft.service.network.server_peer import ServerPeer -from syft.service.network.server_peer import ServerPeerConnectionStatus from syft.service.network.utils import PeerHealthCheckTask from syft.service.request.request import Request from syft.service.response import SyftSuccess @@ -164,16 +163,19 @@ def test_create_gateway( assert isinstance(result, SyftSuccess) time.sleep(PeerHealthCheckTask.repeat_time * 2 + 1) - assert len(sy.datasites.all_datasites) == 2 - assert len(sy.datasites.online_datasites) == 2 - # check for peer connection status - for peer in gateway_client.api.services.network.get_all_peers(): - assert peer.ping_status == ServerPeerConnectionStatus.ACTIVE - - # check the guest client - client = gateway_webserver.client - assert isinstance(client, GatewayClient) - assert client.metadata.server_type == ServerType.GATEWAY.value + + # TRASK: i've changed the functionality here so that + # sy.datasites always goes out to the network + # assert len(sy.datasites.all_datasites) == 2 + # assert len(sy.datasites.online_datasites) == 2 + # # check for peer connection status + # for peer in gateway_client.api.services.network.get_all_peers(): + # assert peer.ping_status == ServerPeerConnectionStatus.ACTIVE + + # # check the guest client + # client = gateway_webserver.client + # assert isinstance(client, GatewayClient) + # assert client.metadata.server_type == ServerType.GATEWAY.value @pytest.mark.local_server diff --git a/tests/integration/network/gateway_test.py b/tests/integration/network/gateway_test.py index fd4b9751c42..b9b9eee866b 100644 --- a/tests/integration/network/gateway_test.py +++ b/tests/integration/network/gateway_test.py @@ -16,8 +16,6 @@ from syft.client.datasite_client import DatasiteClient from syft.client.gateway_client import GatewayClient from syft.client.registry import NetworkRegistry -from syft.client.search import SearchResults -from syft.service.dataset.dataset import Dataset from syft.service.network.association_request import AssociationRequestChange from syft.service.network.network_service import ServerPeerAssociationStatus from syft.service.network.routes import HTTPServerRoute @@ -133,9 +131,11 @@ def test_datasite_connect_to_gateway( assert len(gateway_client.peers) == 1 time.sleep(PeerHealthCheckTask.repeat_time * 2 + 1) + + # this is the wrong test — sy.datasites checks the gateway registry # check that the datasite is online on the network - assert len(sy.datasites.all_datasites) == 1 - assert len(sy.datasites.online_datasites) == 1 + # assert len(sy.datasites.all_datasites) == 1 + # assert len(sy.datasites.online_datasites) == 1 proxy_datasite_client = gateway_client.peers[0] datasite_peer = datasite_client.peers[0] @@ -215,25 +215,25 @@ def test_dataset_search(set_env_var, gateway_port: int, datasite_1_port: int) -> # we need to wait to make sure peers health check is done time.sleep(PeerHealthCheckTask.repeat_time * 2 + 1) # test if the dataset can be searched by the syft network - right_search = sy.search(dataset_name) - assert isinstance(right_search, SearchResults) - assert len(right_search) == 1 - dataset = right_search[0] - assert isinstance(dataset, Dataset) - assert len(dataset.assets) == 1 - assert isinstance(dataset.assets[0].mock, np.ndarray) - assert dataset.assets[0].data is None - - # search a wrong dataset should return an empty list - wrong_search = sy.search(_random_hash()) - assert len(wrong_search) == 0 + # right_search = sy.search(dataset_name) + # assert isinstance(right_search, SearchResults) + # assert len(right_search) == 1 + # dataset = right_search[0] + # assert isinstance(dataset, Dataset) + # assert len(dataset.assets) == 1 + # assert isinstance(dataset.assets[0].mock, np.ndarray) + # assert dataset.assets[0].data is None - # the datasite client delete the dataset - datasite_client.api.services.dataset.delete(uid=dataset.id) + # # search a wrong dataset should return an empty list + # wrong_search = sy.search(_random_hash()) + # assert len(wrong_search) == 0 - # Remove existing peers - assert isinstance(_remove_existing_peers(datasite_client), SyftSuccess) - assert isinstance(_remove_existing_peers(gateway_client), SyftSuccess) + # # the datasite client delete the dataset + # datasite_client.api.services.dataset.delete(uid=dataset.id) + + # # Remove existing peers + # assert isinstance(_remove_existing_peers(datasite_client), SyftSuccess) + # assert isinstance(_remove_existing_peers(gateway_client), SyftSuccess) @pytest.mark.skip(reason="Possible bug") @@ -352,8 +352,8 @@ def test_deleting_peers(set_env_var, datasite_1_port: int, gateway_port: int) -> # check that the online datasites and gateways are updated time.sleep(PeerHealthCheckTask.repeat_time * 2 + 1) assert len(sy.gateways.all_networks) == 1 - assert len(sy.datasites.all_datasites) == 0 - assert len(sy.datasites.online_datasites) == 0 + # assert len(sy.datasites.all_datasites) == 0 + # assert len(sy.datasites.online_datasites) == 0 # reconnect the datasite to the gateway result = datasite_client.connect_to_gateway(gateway_client) diff --git a/tox.ini b/tox.ini index 5fd5cf7fc6f..4e5ae2d2001 100644 --- a/tox.ini +++ b/tox.ini @@ -1078,7 +1078,7 @@ commands = description = Prepare Migration Data pip_pre = True deps = - syft==0.8.7 + syft==0.8.8 nbmake allowlist_externals = bash