Merge branch 'v1.11' into issue_1298
hhunter-ms authored Sep 14, 2023
2 parents fc21e8a + baf93f1 commit afdb7f8
Showing 240 changed files with 4,245 additions and 2,352 deletions.
118 changes: 118 additions & 0 deletions .github/scripts/algolia.py
@@ -0,0 +1,118 @@
import os
from re import S
import sys
import json
from bs4 import BeautifulSoup
from algoliasearch.search_client import SearchClient

url = "docs.dapr.io"
if len(sys.argv) > 1:
    starting_directory = os.path.join(os.getcwd(), str(sys.argv[1]))
else:
    starting_directory = os.getcwd()

ALGOLIA_APP_ID = os.getenv('ALGOLIA_APP_ID')
ALGOLIA_API_KEY = os.getenv('ALGOLIA_API_WRITE_KEY')
ALGOLIA_INDEX_NAME = os.getenv('ALGOLIA_INDEX_NAME')

client = SearchClient.create(ALGOLIA_APP_ID, ALGOLIA_API_KEY)
index = client.init_index(ALGOLIA_INDEX_NAME)

excluded_files = [
    "404.html",
]

excluded_directories = [
    "zh-hans",
]

rankings = {
    "Getting started": 0,
    "Concepts": 100,
    "Developing applications": 200,
    "Operations": 300,
    "Reference": 400,
    "Contributing": 500,
    "Home": 600
}

def scan_directory(directory: str, pages: list):
    # Recursively collect the .html pages to index, honoring the exclusion lists
    # and the <!-- DISABLE_ALGOLIA --> opt-out marker.
    if os.path.basename(directory) in excluded_directories:
        print(f'Skipping directory: {directory}')
        return
    for file in os.listdir(directory):
        path = os.path.join(directory, file)
        if os.path.isfile(path):
            if file.endswith(".html") and file not in excluded_files:
                if '<!-- DISABLE_ALGOLIA -->' not in open(path, encoding="utf8").read():
                    print(f'Indexing: {path}')
                    pages.append(path)
                else:
                    print(f'Skipping hidden page: {path}')
        else:
            scan_directory(path, pages)

def parse_file(path: str):
    # Build one Algolia record per page from its metadata, breadcrumbs, and body text.
    data = {}
    data["hierarchy"] = {}
    data["rank"] = 999
    data["subrank"] = 99
    data["type"] = "lvl2"
    data["lvl0"] = ""
    data["lvl1"] = ""
    data["lvl2"] = ""
    data["lvl3"] = ""
    text = ""
    subrank = 0
    with open(path, "r", errors='ignore') as file:
        content = file.read()
        soup = BeautifulSoup(content, "html.parser")
        for meta in soup.find_all("meta"):
            if meta.get("name") == "description":
                data["lvl2"] = meta.get("content")
                data["hierarchy"]["lvl1"] = meta.get("content")
            elif meta.get("property") == "og:title":
                data["lvl0"] = meta.get("content")
                data["hierarchy"]["lvl0"] = meta.get("content")
                data["hierarchy"]["lvl2"] = meta.get("content")
            elif meta.get("property") == "og:url":
                data["url"] = meta.get("content")
                data["path"] = meta.get("content").split(url)[1]
                data["objectID"] = meta.get("content").split(url)[1]
        breadcrumbs = soup.find_all("li", class_="breadcrumb-item")
        try:
            subrank = len(breadcrumbs)
            data["subrank"] = subrank
        except:
            subrank = 99
            data["subrank"] = 99
        for bc in breadcrumbs:
            section = bc.text.strip()
            data["lvl1"] = section
            data["hierarchy"]["lvl0"] = section
            try:
                data["rank"] = rankings[section] + subrank
            except:
                print(f"Rank not found for section {section}")
                data["rank"] = 998
            break
        for p in soup.find_all("p"):
            if p.text != "":
                text = text + p.text
        data["text"] = text
    return data

def index_payload(payload):
    # Replace the whole index contents with the freshly generated records.
    res = index.replace_all_objects(payload)
    res.wait()


if __name__ == "__main__":
    pages = []
    payload = []
    scan_directory(starting_directory, pages)
    for page in pages:
        data = parse_file(page)
        if "objectID" in data:
            payload.append(data)
    index_payload(payload)
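
Note: the script is driven entirely by the three ALGOLIA_* environment variables and an optional path argument pointing at the rendered HTML. To exercise it locally against a Hugo build, something along these lines should work — the key values are placeholders, and ./public stands in for whichever directory holds the generated site:

    export ALGOLIA_APP_ID=<app-id>
    export ALGOLIA_API_WRITE_KEY=<write-capable-api-key>
    export ALGOLIA_INDEX_NAME=daprdocs
    pip install --upgrade bs4 'algoliasearch>=2.0,<3.0'
    python .github/scripts/algolia.py ./public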
99 changes: 79 additions & 20 deletions .github/workflows/website-root.yml
@@ -1,43 +1,75 @@
name: Azure Static Web App Root

on:
  workflow_dispatch:
  push:
    branches:
      - v1.10
      - v1.11
  pull_request:
    types: [opened, synchronize, reopened, closed]
    branches:
      - v1.10
      - v1.11

concurrency:
  # Cancel the previously triggered build for only PR build.
  group: website-${{ github.event.pull_request.number || github.sha }}
  cancel-in-progress: true

jobs:
  build_and_deploy_job:
    if: github.event_name == 'push' || (github.event_name == 'pull_request' && github.event.action != 'closed')
    name: Build Hugo Website
    if: github.event.action != 'closed'
    runs-on: ubuntu-latest
    name: Build and Deploy Job
    env:
      SWA_BASE: 'proud-bay-0e9e0e81e'
      HUGO_ENV: production
    steps:
      - uses: actions/checkout@v3
      - name: Checkout docs repo
        uses: actions/checkout@v3
        with:
          submodules: true
      - name: Setup Node
        uses: actions/setup-node@v2
        with:
          node-version: '14'
      - name: Setup Hugo
        uses: peaceiris/[email protected]
        with:
          submodules: recursive
          fetch-depth: 0
          hugo-version: 0.102.3
          extended: true
      - name: Setup Docsy
        run: cd daprdocs && git submodule update --init --recursive && sudo npm install -D --save autoprefixer && sudo npm install -D --save postcss-cli
      - name: Build And Deploy
        id: builddeploy
        run: |
          cd daprdocs
          git submodule update --init --recursive
          sudo npm install -D --save autoprefixer
          sudo npm install -D --save postcss-cli
      - name: Build Hugo Website
        run: |
          cd daprdocs
          git config --global --add safe.directory /github/workspace
          if [ $GITHUB_EVENT_NAME == 'pull_request' ]; then
            STAGING_URL="https://${SWA_BASE}-${{github.event.number}}.westus2.azurestaticapps.net/"
          fi
          hugo ${STAGING_URL+-b "$STAGING_URL"}
      - name: Deploy docs site
        uses: Azure/static-web-apps-deploy@v1
        env:
          HUGO_ENV: production
          HUGO_VERSION: "0.100.2"
        with:
          azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN_PROUD_BAY_0E9E0E81E }}
          skip_deploy_on_missing_secrets: true
          repo_token: ${{ secrets.GITHUB_TOKEN }} # Used for Github integrations (i.e. PR comments)
          repo_token: ${{ secrets.GITHUB_TOKEN }}
          action: "upload"
          app_location: "/daprdocs"
          app_build_command: "git config --global --add safe.directory /github/workspace && hugo"
          output_location: "public"
          skip_api_build: true
          app_location: "daprdocs/public/"
          api_location: "daprdocs/public/"
          output_location: ""
          skip_app_build: true
          skip_deploy_on_missing_secrets: true
      - name: Upload Hugo artifacts
        uses: actions/upload-artifact@v3
        with:
          name: hugo_build
          path: ./daprdocs/public/
          if-no-files-found: error

  close_pull_request_job:
  close_staging_site:
    if: github.event_name == 'pull_request' && github.event.action == 'closed'
    runs-on: ubuntu-latest
    name: Close Pull Request Job
@@ -48,3 +80,30 @@ jobs:
        with:
          azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN_PROUD_BAY_0E9E0E81E }}
          action: "close"
          skip_deploy_on_missing_secrets: true

  algolia_index:
    name: Index site for Algolia
    if: github.event_name == 'push'
    needs: ['build_and_deploy_job']
    runs-on: ubuntu-latest
    env:
      ALGOLIA_APP_ID: ${{ secrets.ALGOLIA_APP_ID }}
      ALGOLIA_API_WRITE_KEY: ${{ secrets.ALGOLIA_API_WRITE_KEY }}
      ALGOLIA_INDEX_NAME: daprdocs
    steps:
      - name: Checkout docs repo
        uses: actions/checkout@v2
        with:
          submodules: false
      - name: Download Hugo artifacts
        uses: actions/download-artifact@v3
        with:
          name: hugo_build
          path: site/
      - name: Install Python packages
        run: |
          pip install --upgrade bs4
          pip install --upgrade 'algoliasearch>=2.0,<3.0'
      - name: Index site
        run: python ./.github/scripts/algolia.py ./site
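
The hugo ${STAGING_URL+-b "$STAGING_URL"} invocation in the Build Hugo Website step above relies on the shell's ${VAR+word} form, which expands to word only when VAR is set; since STAGING_URL is only assigned for pull_request events, PR builds pass a -b staging base URL while pushes build with the site's default. A small illustration (the URL here is made up, following the pattern in the workflow):

    STAGING_URL="https://proud-bay-0e9e0e81e-123.westus2.azurestaticapps.net/"
    hugo ${STAGING_URL+-b "$STAGING_URL"}   # runs: hugo -b <staging URL>
    unset STAGING_URL
    hugo ${STAGING_URL+-b "$STAGING_URL"}   # runs: hugo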
4 changes: 2 additions & 2 deletions README.md
@@ -14,8 +14,8 @@ The following branches are currently maintained:

| Branch | Website | Description |
| ------------------------------------------------------------ | -------------------------- | ------------------------------------------------------------------------------------------------ |
| [v1.10](https://github.com/dapr/docs) (primary) | https://docs.dapr.io | Latest Dapr release documentation. Typo fixes, clarifications, and most documentation goes here. |
| [v1.11](https://github.com/dapr/docs/tree/v1.11) (pre-release) | https://v1-11.docs.dapr.io/ | Pre-release documentation. Doc updates that are only applicable to v1.11+ go here. |
| [v1.11](https://github.com/dapr/docs) (primary) | https://docs.dapr.io | Latest Dapr release documentation. Typo fixes, clarifications, and most documentation goes here. |
| [v1.12](https://github.com/dapr/docs/tree/v1.12) (pre-release) | https://v1-12.docs.dapr.io/ | Pre-release documentation. Doc updates that are only applicable to v1.12+ go here. |

For more information visit the [Dapr branch structure](https://docs.dapr.io/contributing/docs-contrib/contributing-docs/#branch-guidance) document.

56 changes: 18 additions & 38 deletions daprdocs/assets/scss/_code.scss
@@ -1,64 +1,44 @@
// Code formatting.

.copy-code-button {
  color: #272822;
  background-color: #FFF;
  border-color: #0D2192;
  border: 2px solid;
  border-radius: 3px 3px 0px 0px;

  /* right-align */
  display: block;
  margin-left: auto;
  margin-right: 0;

  margin-bottom: -2px;
  padding: 3px 8px;
  font-size: 0.8em;
.highlight .copy-icon {
  position: absolute;
  right: 20px;
  top: 18px;
  opacity: 0.7;
}

.copy-code-button:hover {
  cursor: pointer;
  background-color: #F2F2F2;
}

.copy-code-button:focus {
  /* Avoid an ugly focus outline on click in Chrome,
     but darken the button for accessibility.
     See https://stackoverflow.com/a/25298082/1481479 */
  background-color: #E6E6E6;
  outline: 0;
}

.copy-code-button:active {
  background-color: #D9D9D9;
}

.highlight pre {
  /* Avoid pushing up the copy buttons. */
  margin: 0;
}

.td-content {
  // Highlighted code.

  // Highlighted code.
  .highlight {
    @extend .card;

    margin: 0rem 0;
    padding: 0rem;

    margin-bottom: 2rem;

    max-width: 100%;


    border: none;

    pre {
      margin: 0;
      padding: 1rem;
      border-radius: 10px;
    }
  }

  // Inline code
  p code, li > code, table code {
  p code,
  li>code,
  table code {
    color: inherit;
    padding: 0.2em 0.4em;
    margin: 0;
@@ -78,11 +58,11 @@
    word-wrap: normal;
    background-color: $gray-100;
    padding: $spacer;

    max-width: 100%;

    > code {
      background-color: inherit !important;
    >code {
      background-color: inherit !important;
      padding: 0;
      margin: 0;
      font-size: 100%;