diff --git a/REQUIRE b/REQUIRE
deleted file mode 100644
index 0ceb029..0000000
--- a/REQUIRE
+++ /dev/null
@@ -1,2 +0,0 @@
-julia 0.7
-JavaCall 0.7.1
diff --git a/docs/Project.toml b/docs/Project.toml
index 322f226..c764e3e 100644
--- a/docs/Project.toml
+++ b/docs/Project.toml
@@ -1,3 +1,3 @@
 [deps]
 Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
-Yota = "e3819d11-95af-5eea-9727-70c091663a01"
+Spark = "e3819d11-95af-5eea-9727-70c091663a01"
diff --git a/docs/REQUIRE b/docs/REQUIRE
deleted file mode 100644
index 002f6da..0000000
--- a/docs/REQUIRE
+++ /dev/null
@@ -1 +0,0 @@
-Documenter
\ No newline at end of file
diff --git a/docs/localdocs.sh b/docs/localdocs.sh
new file mode 100755
index 0000000..77f9fc9
--- /dev/null
+++ b/docs/localdocs.sh
@@ -0,0 +1,2 @@
+#!/bin/bash
+julia -e 'using LiveServer; serve(dir="build")'
\ No newline at end of file
diff --git a/docs/make.jl b/docs/make.jl
index db9cb38..44c213b 100644
--- a/docs/make.jl
+++ b/docs/make.jl
@@ -1,25 +1,19 @@
-using Documenter, Spark
-
-#load_dir(x) = map(file -> joinpath("lib", x, file), readdir(joinpath(Base.source_dir(), "src", "lib", x)))
+using Documenter
+using Spark
 
 makedocs(
-    modules = [Spark],
-    clean = false,
-    format = [:html],
     sitename = "Spark",
-    pages = Any[
-        "Introduction" => "index.md",
-        "Getting Started" => "getting_started.md",
-        "Structured Streaming" => "structured_streaming.md",
-        "API Reference" => "api.md"
-    ]
+    format = Documenter.HTML(),
+    modules = [Spark],
+    pages = [
+        "Main" => "index.md",
+        "Cookbook" => "cookbook.md",
+        "Build your own AD" => "design.md",
+        "Reference" => "reference.md",
+    ],
 )
 
 deploydocs(
-    repo = "github.com/dfdx/Spark.jl.git",
-    julia = "0.6",
-    osname = "linux",
-    deps = nothing,
-    make = nothing,
-    target = "build",
+    repo = "github.com/dfdx/Spark.jl.git",
+    devbranch = "main",
 )
\ No newline at end of file
diff --git a/docs/make_old.jl b/docs/make_old.jl
new file mode 100644
index 0000000..db9cb38
--- /dev/null
+++ b/docs/make_old.jl
@@ -0,0 +1,25 @@
+using Documenter, Spark
+
+#load_dir(x) = map(file -> joinpath("lib", x, file), readdir(joinpath(Base.source_dir(), "src", "lib", x)))
+
+makedocs(
+    modules = [Spark],
+    clean = false,
+    format = [:html],
+    sitename = "Spark",
+    pages = Any[
+        "Introduction" => "index.md",
+        "Getting Started" => "getting_started.md",
+        "Structured Streaming" => "structured_streaming.md",
+        "API Reference" => "api.md"
+    ]
+)
+
+deploydocs(
+    repo = "github.com/dfdx/Spark.jl.git",
+    julia = "0.6",
+    osname = "linux",
+    deps = nothing,
+    make = nothing,
+    target = "build",
+)
\ No newline at end of file
diff --git a/examples/basic.jl b/examples/basic.jl
deleted file mode 100644
index 3f9a058..0000000
--- a/examples/basic.jl
+++ /dev/null
@@ -1,12 +0,0 @@
-## THIS EXAMPLE IS OUTDATED!
-## TODO: update or remove
-
-using Spark
-
-sc = SparkContext(master="local")
-path = "file:///var/log/syslog"
-txt = text_file(sc, path)
-rdd = map_partitions(txt, it -> map(s -> length(split(s)), it))
-count(rdd)
-reduce(rdd, +)
-close(sc)
diff --git a/src/core.jl b/src/core.jl
index 2a32c52..5f0b268 100644
--- a/src/core.jl
+++ b/src/core.jl
@@ -5,6 +5,10 @@ import Statistics
 # using TableTraits
 # using IteratorInterfaceExtensions
 
+export SparkSession, DataFrame, GroupedData, Column, Row
+export StructType, StructField, DataType
+export Window, WindowSpec
+
 
 include("chainable.jl")
 include("init.jl")
@@ -26,18 +30,16 @@ function __init__()
 end
 
 
-# During development we use just include("core.jl") and get a single
-# namespace with all the functions. But for the outer world we also
-# provide a set of modules mimicing PySpark package layout
+# pseudo-modules for some specific functions not exported by default
 
 module Compiler
     using Reexport
     @reexport import Spark: udf, jcall2, create_instance, create_class
 end
 
-module SQL
-    using Reexport
-    @reexport import Spark: SparkSession, DataFrame, GroupedData, Column, Row
-    @reexport import Spark: StructType, StructField, DataType
-    @reexport import Spark: Window, WindowSpec
-end
\ No newline at end of file
+# module SQL
+#     using Reexport
+#     @reexport import Spark: SparkSession, DataFrame, GroupedData, Column, Row
+#     @reexport import Spark: StructType, StructField, DataType
+#     @reexport import Spark: Window, WindowSpec
+# end
\ No newline at end of file
diff --git a/test/test_compiler.jl b/test/test_compiler.jl
index 105f505..5526077 100644
--- a/test/test_compiler.jl
+++ b/test/test_compiler.jl
@@ -1,4 +1,4 @@
-import Spark.Compiler: jcall2, udf
+import Spark: jcall2, udf
 import Spark.JavaCall: @jimport, jdouble, JString
 
 const JDouble = @jimport java.lang.Double
diff --git a/test/test_sql.jl b/test/test_sql.jl
index 895057a..9e8f5ba 100644
--- a/test/test_sql.jl
+++ b/test/test_sql.jl
@@ -1,4 +1,4 @@
-using Spark.SQL
+using Spark
 using Spark.Compiler
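
Note on the docs changes: the new docs/localdocs.sh only serves an already-built site, and it assumes LiveServer is installed in the active Julia environment. A sketch of the full local workflow, in Julia — the Pkg.add step and running from the docs/ directory are assumptions, not part of this diff:

    # One-time setup, assuming LiveServer is not yet installed:
    using Pkg
    Pkg.add("LiveServer")

    # From the docs/ directory: build the site, then serve it.
    # The serve() call is exactly what docs/localdocs.sh wraps.
    include("make.jl")     # runs makedocs(), writing HTML into docs/build/
    using LiveServer
    serve(dir = "build")   # LiveServer serves on http://localhost:8000 by default

With the Documenter upgrade, deploydocs needs only the repo and devbranch = "main"; the julia/osname/deps/make/target keywords belong to the old API that make_old.jl preserves for reference.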
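Note on the src/core.jl change: the SQL names are now exported straight from the top-level module, so the Spark.SQL pseudo-module is commented out and the test imports are updated to match. A minimal before/after sketch, grounded in the new export list and the test diffs:

    # Before: names came from pseudo-modules mimicking the PySpark layout.
    # using Spark.SQL                 # SparkSession, DataFrame, GroupedData, ...
    # import Spark.Compiler: jcall2, udf

    # After: `using Spark` brings the SQL names into scope directly ...
    using Spark     # exports SparkSession, DataFrame, GroupedData, Column, Row,
                    # StructType, StructField, DataType, Window, WindowSpec

    # ... and the Compiler helpers are imported from Spark itself,
    # as in the updated test_compiler.jl:
    import Spark: jcall2, udf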