
Update versions on CI (#224)
Co-authored-by: Jonatan Kłosko <[email protected]>
grzuy and jonatanklosko committed Jul 26, 2023
1 parent 8ec5472 commit ac07a27
Showing 6 changed files with 30 additions and 8 deletions.
.github/workflows/test.yaml: 8 additions & 3 deletions
@@ -7,15 +7,20 @@ on:
 
 jobs:
   main:
+    name: main (${{ matrix.pair.elixir }}, ${{ matrix.pair.otp }}, lint: ${{ matrix.lint }}, slow: ${{ matrix.slow }})
     runs-on: ubuntu-latest
     strategy:
       fail-fast: false
       matrix:
         include:
           - pair:
-              elixir: 1.14.0
-              otp: 24.0
+              elixir: "1.15.4"
+              otp: "26.0.2"
             lint: true
+            slow: true
+          - pair:
+              elixir: "1.14.5"
+              otp: "25.3.2.2"
     env:
       MIX_ENV: test
       XLA_CACHE_DIR: ${{ github.workspace }}/cache/xla
@@ -51,4 +56,4 @@ jobs:
         # mix test exits with a non-zero code if there are no matching tests,
         # so we make sure we fail only when the test suite fails
         run: mix test test/bumblebee_test.exs --only slow --exit-status 100 ${{ env.GIT_DIFF_FILTERED }} || [ $? -ne 100 ]
-        if: ${{ env.GIT_DIFF_FILTERED != '' }}
+        if: ${{ matrix.slow && env.GIT_DIFF_FILTERED != '' }}
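A note on the slow-test step above (commentary, not part of the commit): the first matrix entry (with lint: true and slow: true) runs lint checks and the slow suite on the newest Elixir/OTP pair, the new second entry runs only the regular suite on the older pair, and the reworked if: condition now also requires matrix.slow. The subtle part is the shell idiom in the run: line: mix test --exit-status 100 reserves status 100 for an actual test failure, and the trailing || [ $? -ne 100 ] turns every other non-zero status (for example, no tests matching the --only slow filter) back into success, so the step fails only when the suite really fails. A rough Elixir sketch of that decision, for illustration only, assuming it runs inside the project directory:

```elixir
# Illustration only (not from the commit): mimic the workflow's
# "fail only when the test suite fails" logic from Elixir.
{_output, status} =
  System.shell("mix test test/bumblebee_test.exs --only slow --exit-status 100")

cond do
  # Tests ran and passed.
  status == 0 -> :ok
  # 100 is the status reserved via --exit-status, i.e. a real test failure.
  status == 100 -> :test_failure
  # Any other non-zero status, e.g. no tests matched the --only filter.
  true -> :ok
end
```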
lib/bumblebee/layers/transformer.ex: 5 additions & 1 deletion
@@ -449,7 +449,11 @@ defmodule Bumblebee.Layers.Transformer do
         )
 
       hidden_state =
-        Axon.dropout(hidden_state, rate: dropout_rate, name: join(name, "cross_attention_dropout"))
+        Axon.dropout(
+          hidden_state,
+          rate: dropout_rate,
+          name: join(name, "cross_attention_dropout")
+        )
 
       {hidden_state, {cross_attention, cross_attention_cache}}
     end
lib/bumblebee/shared.ex: 1 addition & 1 deletion
@@ -260,7 +260,7 @@ defmodule Bumblebee.Shared do
           function(),
           keyword(),
           boolean(),
-          (() -> list(Nx.Tensor.t()))
+          (-> list(Nx.Tensor.t()))
         ) :: function()
   def compile_or_jit(fun, defn_options, compile?, template_fun) do
     if compile? do
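The small change in Bumblebee.Shared above swaps the zero-arity function type in the compile_or_jit spec from (() -> ...) to (-> ...), presumably the spelling preferred by the newer Elixir versions added to CI (the doubled-parentheses form is deprecated in recent releases). A minimal sketch with a hypothetical module, just to contrast the two spellings:

```elixir
# Hypothetical module (not from the repo), contrasting the two ways to
# write a zero-arity function type in a spec.
defmodule ZeroArityTypeExample do
  # Older spelling, deprecated on recent Elixir versions:
  #   @spec run((() -> integer())) :: integer()
  # Current spelling, matching the change above:
  @spec run((-> integer())) :: integer()
  def run(fun), do: fun.()
end

# ZeroArityTypeExample.run(fn -> 42 end)
# #=> 42
```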
lib/bumblebee/text/clip_text.ex: 5 additions & 1 deletion
@@ -137,7 +137,11 @@ defmodule Bumblebee.Text.ClipText do
     encoder_outputs = encoder(embeddings, inputs["attention_mask"], spec, name: "encoder")
 
     hidden_state =
-      Axon.layer_norm(encoder_outputs.hidden_state, epsilon: spec.layer_norm_epsilon, name: "norm")
+      Axon.layer_norm(
+        encoder_outputs.hidden_state,
+        epsilon: spec.layer_norm_epsilon,
+        name: "norm"
+      )
 
     pooled_state =
       Axon.layer(
lib/bumblebee/text/t5.ex: 6 additions & 1 deletion
@@ -327,7 +327,12 @@ defmodule Bumblebee.Text.T5 do
       }
     else
       embeddings =
-        embedder(inputs["input_ids"], inputs["input_embeddings"], spec, name: "encoder_embedder")
+        embedder(
+          inputs["input_ids"],
+          inputs["input_embeddings"],
+          spec,
+          name: "encoder_embedder"
+        )
 
       embeddings
       |> encoder(inputs["attention_mask"], inputs["attention_head_mask"], spec, name: "encoder")
lib/bumblebee/vision/blip_vision.ex: 5 additions & 1 deletion
@@ -121,7 +121,11 @@ defmodule Bumblebee.Vision.BlipVision do
     encoder_outputs = encoder(embeddings, spec, name: "encoder")
 
     hidden_state =
-      Axon.layer_norm(encoder_outputs.hidden_state, epsilon: spec.layer_norm_epsilon, name: "norm")
+      Axon.layer_norm(
+        encoder_outputs.hidden_state,
+        epsilon: spec.layer_norm_epsilon,
+        name: "norm"
+      )
 
     pooled_state =
       hidden_state
