Skip to content

Commit

Permalink
new interface with deprecations for old interface (#13)
Browse files Browse the repository at this point in the history
* new interface with deprecations for old interface

* slight interface change; update README

* update tests and benchmarks

* add project.toml, remove require, update travis.yml

* remove project.toml

* add Project.toml
  • Loading branch information
Jutho authored Jul 18, 2019
1 parent 64b01ab commit d24a6d2
Show file tree
Hide file tree
Showing 7 changed files with 74 additions and 88 deletions.
23 changes: 18 additions & 5 deletions .travis.yml
Original file line number Diff line number Diff line change
@@ -1,14 +1,27 @@
# Documentation: http://docs.travis-ci.com/user/languages/julia/
language: julia
os:
- linux
- osx
julia:
- 0.7
- 1.0
- 1.1
- 1.2
- nightly
notifications:
email: false
# uncomment the following lines to override the default test script
#script:
# - if [[ -a .git/shallow ]]; then git fetch --unshallow; fi
# - julia --check-bounds=yes -e 'Pkg.clone(pwd()); Pkg.build("LRUCache"); Pkg.test("LRUCache"; coverage=true)'

matrix:
allow_failures:
- julia: nightly

env:
matrix:
- JULIA_NUM_THREADS=1
# - JULIA_NUM_THREADS=4
#
## uncomment the following lines to override the default test script

after_success:
# push coverage results to Codecov and Coveralls
- julia -e 'using Pkg; Pkg.add("Coverage"); using Coverage; Codecov.submit(Codecov.process_folder()); Coveralls.submit(Coveralls.process_folder())'
12 changes: 12 additions & 0 deletions Project.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
name = "LRUCache"
uuid = "8ac3fa9e-de4c-5943-b1dc-09c6b5f20637"
version = "0.3.0"

[compat]
julia = "1"

[extras]
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"

[targets]
test = ["Test"]
75 changes: 16 additions & 59 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -10,36 +10,36 @@ first.

## Interface

`LRU` supports the standard `Associative` interface. Some examples of common
`LRU` supports the standard `AbstractDict` interface. Some examples of common
operations are shown below:

**Creation**

```julia
lru = LRU{K, V}([, maxsize=100])
lru = LRU{K, V}(; maxsize = size)
```

Create an LRU Cache with maximum size `maxsize`. If `maxsize` is not provided,
a default of `100` is used.
Create an LRU Cache with a maximum size (number of items) specified by the *required*
keyword argument `maxsize`.

**Add an item to the cache**

```julia
setitem!(lru, key, value)
setindex!(lru, value, key)
lru[key] = value
```

**Lookup an item in the cache**

```julia
getitem(lru, key)
getindex(lru, key)
lru[key]
```

**Change the maxsize**

```julia
resize!(lru, size)
resize!(lru; maxsize = size)
```

**Empty the cache**
Expand All @@ -50,16 +50,15 @@ empty!(lru)

### Caching Use

To effectively use `LRU` as a cache, several functions and macros have been
defined that allow for easy checking if an item is present, and if not quickly
calculating a default.
To effectively use `LRU` as a cache, several functions from the `AbstractDict` interface
can be used to easily check whether an item is present and, if not, to quickly compute a
default value.

#### get!(lru::LRU, key, default)

Returns the value stored in `lru` for `key` if present. If not, stores `key =>
default`, and returns `default`.


#### get!(default::Callable, lru::LRU, key)

Like above, except if `key` is not present, stores `key => default()`, and
Expand All @@ -71,55 +70,11 @@ get!(lru, key) do
end
```

#### @get!(lru::LRU, key, default)

The `do` block syntax of `get!` is nice, but can be slow due to how Julia
currently handles anonymous functions. The `@get!` macro is an attempt to get
around this issue. It takes 3 parameters: the cache, a key to lookup, and a
default. Note that as this is handled with meta-programming, the default can
be *anything that can be set as the right-hand-side of an assignment*. Example.

```julia
const lru = LRU{Float64, Int}()

_foo(a::Float64) = ... # Some long-running calculation that returns an Int
#### get(lru::LRU, key, default)

function foo(a::Float64)
@get! lru a _foo(a)
end
```
Returns the value stored in `lru` for `key` if present. If not, returns `default` without storing this value in `lru`. Also comes in the following form:

This expands (roughly) to:

```julia
function foo(a::Float64)
return begin
if haskey(lru, a)
value = lru[a]
else
value = _foo(a)
lru[a] = value
end
value
end
end
```

The same can be done with a constant default value, or a `begin` block:

```julia
const lru = LRU{Int, Int}()

function fib(a::Int)
@get! lru a begin
if a < 2
a
else
fib(a - 1) + fib(a - 2)
end
end
end
```
#### get(default::Callable, lru::LRU, key)

## Example

Expand All @@ -143,6 +98,8 @@ As this function requires more than one parameter, we need a cache from
const lru = LRU{Tuple{Float64, Float64}, Float64}()

function cached_foo(a::Float64, b::Float64)
@get! lru (a, b) foo(a, b)
get!(lru, (a, b)) do
    foo(a, b)
end
end
```
1 change: 0 additions & 1 deletion REQUIRE

This file was deleted.

24 changes: 19 additions & 5 deletions src/LRUCache.jl
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,13 @@ mutable struct LRU{K,V} <: AbstractDict{K,V}
q::LRUList{K, V}
maxsize::Int

LRU{K, V}(m::Int=__MAXCACHE__) where {K, V} = new{K, V}(Dict{K, V}(), LRUList{K, V}(), m)
LRU{K, V}(; maxsize::Int) where {K, V} =
new{K, V}(Dict{K, V}(), LRUList{K, V}(), maxsize)
end
LRU(m::Int=__MAXCACHE__) = LRU{Any, Any}(m)
LRU(; maxsize::Int) = LRU{Any,Any}(; maxsize = maxsize)

Base.@deprecate LRU(m::Int=__MAXCACHE__) LRU(; maxsize = m)
Base.@deprecate (LRU{K, V}(m::Int=__MAXCACHE__) where {K, V}) (LRU{K, V}(; maxsize = m))

Base.show(io::IO, lru::LRU{K, V}) where {K, V} = print(io,"LRU{$K, $V}($(lru.maxsize))")

Expand All @@ -27,8 +31,15 @@ Base.sizehint!(lru::LRU, n::Integer) = sizehint!(lru.ht, n)

Base.haskey(lru::LRU, key) = haskey(lru.ht, key)
Base.get(lru::LRU, key, default) = haskey(lru, key) ? lru[key] : default
Base.get(default::Base.Callable, lru::LRU, key) = haskey(lru, key) ? lru[key] : default()

macro get!(lru, key, default)
@warn "`@get! lru key default(args...)` is deprecated, use `get!(()->default(args...), lru, key)` or
```
get!(lru, key) do
default(args...)
end
```"
quote
if haskey($(esc(lru)), $(esc(key)))
value = $(esc(lru))[$(esc(key))]
Expand Down Expand Up @@ -87,9 +98,12 @@ function Base.setindex!(lru::LRU{K, V}, v, key) where {K, V}
return lru
end

function Base.resize!(lru::LRU, n::Int)
n < 0 && error("size must be a positive integer")
lru.maxsize = n
import Base: resize!
Base.@deprecate resize!(lru::LRU, m::Int) resize!(lru; maxsize = m)

function resize!(lru::LRU; maxsize::Int)
maxsize < 0 && error("size must be a positive integer")
lru.maxsize = maxsize
for i in 1:(length(lru) - lru.maxsize)
rm = pop!(lru.q)
delete!(lru.ht, rm.k)
Expand Down
10 changes: 5 additions & 5 deletions test/benchmark.jl
Original file line number Diff line number Diff line change
Expand Up @@ -7,10 +7,10 @@ using LRUCache
# rather than on the function itself. Which is optimal for comparing
# improvements to the cache speed.

const FIBCACHE = LRU{Int, Int}(10)
const FIBCACHE = LRU{Int, Int}(; maxsize = 10)

function fib(a::Int)
@get! FIBCACHE a begin
get!(FIBCACHE, a) do
if a < 2
a
else
Expand All @@ -20,7 +20,7 @@ function fib(a::Int)
end

function fib_benchmark(cachesize)
resize!(FIBCACHE, cachesize)
resize!(FIBCACHE; maxsize = cachesize)
empty!(FIBCACHE)
println("Cache Size = $cachesize")
@time fib(50)
Expand All @@ -41,7 +41,7 @@ println()

# Now we benchmark individual operations
function setup_cache(cachesize, items)
resize!(FIBCACHE, cachesize)
resize!(FIBCACHE; maxsize = cachesize)
empty!(FIBCACHE)
for i in items
FIBCACHE[i] = i
Expand Down Expand Up @@ -74,7 +74,7 @@ println()
println("Insertion Benchmarks")
println("--------------------")
# Cache is empty, with size 5
resize!(FIBCACHE, 5)
resize!(FIBCACHE; maxsize = 5)
empty!(FIBCACHE)
println("Insertion when cache is not full, element not in cache")
for i in 1:5
Expand Down
17 changes: 4 additions & 13 deletions test/test.jl
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ function test_order(lru, keys, vals)
end
end

const CACHE = LRU{Int, Int}(20)
const CACHE = LRU{Int, Int}(; maxsize = 20)
# Test insertion ordering
kvs = 1:10
for i in reverse(kvs)
Expand All @@ -35,10 +35,10 @@ end
test_order(CACHE, kvs, kvs)

# Test least recently used items are evicted
resize!(CACHE, 5)
resize!(CACHE; maxsize = 5)
test_order(CACHE, kvs[1:5], kvs[1:5])

resize!(CACHE, 10)
resize!(CACHE; maxsize = 10)
test_order(CACHE, kvs[1:5], kvs[1:5])

kvs = 1:11
Expand Down Expand Up @@ -74,19 +74,10 @@ get!(CACHE, 2) do
error("this shouldn't have been called!")
end

# Test @get! with begin block
val = @get! CACHE 3 begin
4
end
@test val == 4
@get! CACHE 3 begin
error("this shouldn't have been called!")
end

# Test Abstract typed cache. All we're checking for here is that the container
# is able to hold abstract types without issue. Insertion order is already
# tested above.
const CACHE2 = LRU{String, Integer}(5)
const CACHE2 = LRU{String, Integer}(; maxsize = 5)
CACHE2["test"] = 4
@test CACHE2["test"] == 4

Expand Down

0 comments on commit d24a6d2

Please sign in to comment.