
In preparation for the release
dfdx committed May 30, 2022
1 parent 3f22e99 commit 795dbe4
Showing 10 changed files with 53 additions and 45 deletions.
2 changes: 0 additions & 2 deletions REQUIRE

This file was deleted.

2 changes: 1 addition & 1 deletion docs/Project.toml
@@ -1,3 +1,3 @@
 [deps]
 Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
-Yota = "e3819d11-95af-5eea-9727-70c091663a01"
+Spark = "e3819d11-95af-5eea-9727-70c091663a01"
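
Only the dependency name changes here (Yota → Spark); the UUID stays the same, so this fixes a mislabeled entry rather than swapping packages. Entries like this are normally written by Pkg rather than edited by hand; a hedged sketch of regenerating it (the Pkg calls are standard, but running them against this repo is an assumption):

    # Hypothetical: recreate the entry via Pkg instead of hand-editing,
    # assuming Spark is resolvable from a registry or a local path.
    using Pkg
    Pkg.activate("docs")   # operate on docs/Project.toml
    Pkg.add("Spark")       # records the name = "uuid" pair under [deps]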
1 change: 0 additions & 1 deletion docs/REQUIRE

This file was deleted.

2 changes: 2 additions & 0 deletions docs/localdocs.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
+julia -e 'using LiveServer; serve(dir="build")'
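
The new helper script assumes LiveServer.jl is installed in the active environment and that the docs were already built into docs/build. A minimal one-time setup might look like this (a sketch, not part of the commit):

    # Hypothetical setup for localdocs.sh:
    using Pkg
    Pkg.add("LiveServer")  # static file server with live reload
    # Build the docs first (see make.jl below), then from docs/ run:
    #   ./localdocs.sh     # serves docs/build at http://localhost:8000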
30 changes: 12 additions & 18 deletions docs/make.jl
@@ -1,25 +1,19 @@
-using Documenter, Spark
-
-#load_dir(x) = map(file -> joinpath("lib", x, file), readdir(joinpath(Base.source_dir(), "src", "lib", x)))
+using Documenter
+using Spark
 
 makedocs(
-    modules = [Spark],
-    clean = false,
-    format = [:html],
     sitename = "Spark",
-    pages = Any[
-        "Introduction" => "index.md",
-        "Getting Started" => "getting_started.md",
-        "Structured Streaming" => "structured_streaming.md",
-        "API Reference" => "api.md"
-    ]
+    format = Documenter.HTML(),
+    modules = [Spark],
+    pages = [
+        "Main" => "index.md",
+        "Cookbook" => "cookbook.md",
+        "Build your own AD" => "design.md",
+        "Reference" => "reference.md",
+    ],
 )
 
 deploydocs(
-    repo = "github.com/dfdx/Spark.jl.git",
-    julia = "0.6",
-    osname = "linux",
-    deps = nothing,
-    make = nothing,
-    target = "build",
+    repo = "github.com/dfdx/Spark.jl.git",
+    devbranch = "main",
 )
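
The rewrite replaces legacy options from the Documenter/Travis era (format = [:html], julia = "0.6", osname = "linux") with the current Documenter.HTML() format object and devbranch = "main". Assuming docs/Project.toml lists Documenter and Spark, a local build could be run roughly like this (a sketch, not part of the commit):

    # Hypothetical local docs build, executed from the repository root:
    using Pkg
    Pkg.activate("docs")       # use the docs/Project.toml environment
    Pkg.develop(path=".")      # resolve Spark to the local checkout
    Pkg.instantiate()
    include(joinpath("docs", "make.jl"))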
25 changes: 25 additions & 0 deletions docs/make_old.jl
@@ -0,0 +1,25 @@
+using Documenter, Spark
+
+#load_dir(x) = map(file -> joinpath("lib", x, file), readdir(joinpath(Base.source_dir(), "src", "lib", x)))
+
+makedocs(
+    modules = [Spark],
+    clean = false,
+    format = [:html],
+    sitename = "Spark",
+    pages = Any[
+        "Introduction" => "index.md",
+        "Getting Started" => "getting_started.md",
+        "Structured Streaming" => "structured_streaming.md",
+        "API Reference" => "api.md"
+    ]
+)
+
+deploydocs(
+    repo = "github.com/dfdx/Spark.jl.git",
+    julia = "0.6",
+    osname = "linux",
+    deps = nothing,
+    make = nothing,
+    target = "build",
+)
12 changes: 0 additions & 12 deletions examples/basic.jl

This file was deleted.

20 changes: 11 additions & 9 deletions src/core.jl
@@ -5,6 +5,10 @@ import Statistics
 # using TableTraits
 # using IteratorInterfaceExtensions
 
+export SparkSession, DataFrame, GroupedData, Column, Row
+export StructType, StructField, DataType
+export Window, WindowSpec
+
 
 include("chainable.jl")
 include("init.jl")
@@ -26,18 +30,16 @@ function __init__()
 end
 
 
-# During development we use just include("core.jl") and get a single
-# namespace with all the functions. But for the outer world we also
-# provide a set of modules mimicing PySpark package layout
+# pseudo-modules for some specific functions not exported by default
 
 module Compiler
 using Reexport
 @reexport import Spark: udf, jcall2, create_instance, create_class
 end
 
-module SQL
-using Reexport
-@reexport import Spark: SparkSession, DataFrame, GroupedData, Column, Row
-@reexport import Spark: StructType, StructField, DataType
-@reexport import Spark: Window, WindowSpec
-end
+# module SQL
+# using Reexport
+# @reexport import Spark: SparkSession, DataFrame, GroupedData, Column, Row
+# @reexport import Spark: StructType, StructField, DataType
+# @reexport import Spark: Window, WindowSpec
+# end
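
With the SQL pseudo-module commented out and its names promoted to plain top-level exports, a single using Spark now replaces the old using Spark.SQL (the test changes below follow from this). A small check directly implied by the new export lines (running it requires a working JVM setup, which is an assumption here):

    using Spark   # replaces the old `using Spark.SQL`

    # Every name from the new export statements resolves at the top level:
    for name in (:SparkSession, :DataFrame, :GroupedData, :Column, :Row,
                 :StructType, :StructField, :DataType, :Window, :WindowSpec)
        @assert isdefined(Spark, name) && Base.isexported(Spark, name)
    end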
2 changes: 1 addition & 1 deletion test/test_compiler.jl
@@ -1,4 +1,4 @@
-import Spark.Compiler: jcall2, udf
+import Spark: jcall2, udf
 import Spark.JavaCall: @jimport, jdouble, JString
 
 const JDouble = @jimport java.lang.Double
2 changes: 1 addition & 1 deletion test/test_sql.jl
@@ -1,4 +1,4 @@
-using Spark.SQL
+using Spark
 using Spark.Compiler
 
 
