Initial commit

Mikko Ahlroth 2018-10-15 22:47:05 +03:00
commit 68e705caa3
147 changed files with 5285 additions and 0 deletions

4
.formatter.exs Normal file

@@ -0,0 +1,4 @@
# Used by "mix format"
[
inputs: ["mix.exs", "{config,lib,test}/**/*.{ex,exs}"]
]

32
.gitignore vendored Normal file

@@ -0,0 +1,32 @@
# The directory Mix will write compiled artifacts to.
/_build/
# If you run "mix test --cover", coverage assets end up here.
/cover/
# The directory Mix downloads your dependencies sources to.
/deps/
# Where 3rd-party dependencies like ExDoc output generated docs.
/doc/
# Ignore .fetch files in case you like to edit your project deps locally.
/.fetch
# If the VM crashes, it generates a dump, let's ignore it too.
erl_crash.dump
# Also ignore archive artifacts (built via "mix archive.build").
*.ez
# Ignore package tarball (built via "mix hex.build").
mebe_2-*.tar
# Ignore secret configs
/config/*.secret.exs
/data
/.elixir_ls
# Built frontend assets
/priv/static

6
.on-save.json Normal file

@@ -0,0 +1,6 @@
[
{
"files": "**/*.{ex,exs}",
"command": "mix format ${srcFile}"
}
]

21
README.md Normal file

@@ -0,0 +1,21 @@
# Mebe2
**TODO: Add description**
## Installation
If [available in Hex](https://hex.pm/docs/publish), the package can be installed
by adding `mebe_2` to your list of dependencies in `mix.exs`:
```elixir
def deps do
[
{:mebe_2, "~> 0.1.0"}
]
end
```
Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc)
and published on [HexDocs](https://hexdocs.pm). Once published, the docs can
be found at [https://hexdocs.pm/mebe_2](https://hexdocs.pm/mebe_2).

73
config/config.exs Normal file

@@ -0,0 +1,73 @@
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :mebe_2, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:mebe_2, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
config :mebe_2,
# The path to crawl post and page data from. No trailing slash, use an absolute path.
data_path: Path.expand("data"),
# Port to listen on
port: 2124,
# Basic blog information
blog_name: "My awesome blog",
blog_author: "Author McAuthor",
# Absolute URL to the site, including protocol, no trailing slash
absolute_url: "http://localhost:2124",
# Set to true to show author header from posts, if available (blog_author will be used as default)
multi_author_mode: false,
# If multi author mode is on, use blog_author as default author (if this is false, no author will be set if post has no author header)
use_default_author: true,
# Default timezone to use for posts with time data
time_default_tz: "Europe/Helsinki",
# Force "Read more…" text to display even if there is no more content
force_read_more: false,
# Set to true to enable RSS feeds
enable_feeds: false,
# Show full content in feeds instead of short content
feeds_full_content: false,
posts_per_page: 10,
posts_in_feed: 20,
# Disqus comments
# Use Disqus comments
disqus_comments: false,
# Show comments for pages too
page_commenting: false,
disqus_shortname: "my-awesome-blog",
# Extra HTML that is injected to every page, right before </body>. Useful for analytics scripts.
extra_html: """
<script>
window.tilastokeskus_url = '/tilastokeskus/track';
</script>
<script src="/tilastokeskus/track.js" async></script>
"""
if Mix.env() == :dev do
config :exsync, :extensions, [".ex", ".eex"]
end
# If you wish to compile in secret settings, use the following file. Note that the settings in
# the file will be set at release generation time and cannot be changed later.
if File.exists?("config/config.secret.exs") do
import_config("config.secret.exs")
end
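
Not part of the commit: a hypothetical `config/config.secret.exs`, sketched only to illustrate the mechanism described above; the setting shown is made up.

```elixir
# config/config.secret.exs (hypothetical, compiled in at release generation time)
use Mix.Config

config :mebe_2,
  disqus_shortname: "real-shortname-here"
```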

113
lib/engine/crawler.ex Normal file

@@ -0,0 +1,113 @@
defmodule Mebe2.Engine.Crawler do
@moduledoc """
The crawler goes through the specified directory, opening and parsing all the matching files
inside concurrently.
"""
require Logger
alias Mebe2.Engine.{Parser, Utils, SlugUtils}
alias Mebe2.Engine.Models.{Page, Post}
def crawl(path) do
get_files(path)
|> Enum.map(fn file -> Task.async(__MODULE__, :parse, [file]) end)
|> handle_responses()
|> construct_archives()
end
def get_files(path) do
path = path <> "/**/*.md"
Logger.info("Searching files using '#{path}' with cwd '#{System.cwd()}'")
files = Path.wildcard(path)
Logger.info("Found files:")
for file <- files do
Logger.info(file)
end
files
end
def parse(file) do
try do
File.read!(file)
|> Parser.parse(Path.basename(file))
rescue
_ ->
Logger.error("Could not parse file #{file}. Exception: #{inspect(__STACKTRACE__)}")
:error
end
end
def handle_responses(tasklist) do
Enum.map(tasklist, fn task -> Task.await(task) end)
end
def construct_archives(datalist) do
Enum.reduce(
datalist,
%{
pages: %{},
posts: [],
years: %{},
months: %{},
tags: %{},
authors: %{},
author_names: %{}
},
fn pagedata, acc ->
case pagedata do
# Ignore pages/posts that could not be parsed
:error ->
acc
%Page{} ->
put_in(acc, [:pages, pagedata.slug], pagedata)
%Post{} ->
{{year, month, _}, _} = Calendar.DateTime.to_erl(pagedata.datetime)
tags =
Enum.reduce(pagedata.tags, acc.tags, fn tag, tagmap ->
posts = Map.get(tagmap, tag, [])
Map.put(tagmap, tag, [pagedata | posts])
end)
{authors, author_names} = form_authors(acc, pagedata)
year_posts = [pagedata | Map.get(acc.years, year, [])]
month_posts = [pagedata | Map.get(acc.months, {year, month}, [])]
%{
acc
| posts: [pagedata | acc.posts],
years: Map.put(acc.years, year, year_posts),
months: Map.put(acc.months, {year, month}, month_posts),
tags: tags,
authors: authors,
author_names: author_names
}
end
end
)
end
defp form_authors(datalist, pagedata) do
multi_author_mode = Mebe2.get_conf(:multi_author_mode)
do_form_authors(multi_author_mode, datalist, pagedata)
end
defp do_form_authors(false, _, _), do: {%{}, %{}}
defp do_form_authors(true, %{authors: authors, author_names: author_names}, pagedata) do
author_name = Utils.get_author(pagedata)
author_slug = SlugUtils.slugify(author_name)
author_posts = [pagedata | Map.get(authors, author_slug, [])]
authors = Map.put(authors, author_slug, author_posts)
# Authors end up with the name that was in the post with the first matching slug
author_names = Map.put_new(author_names, author_slug, author_name)
{authors, author_names}
end
end

258
lib/engine/db.ex Normal file

@@ -0,0 +1,258 @@
defmodule Mebe2.Engine.DB do
require Logger
alias Mebe2.Engine.{Utils, SlugUtils, Models}
alias Calendar.DateTime
@moduledoc """
Stuff related to storing the blog data in memory (ETS).
"""
# Table for meta information, like the counts of posts and names
# of authors
@meta_table :mebe2_meta
# Table for storing pages by slug
@page_table :mebe2_pages
# Table for sequential retrieval of posts (for list pages)
@post_table :mebe2_posts
# Table for quick retrieval of single post (with key)
@single_post_table :mebe2_single_posts
# Table for storing posts with tag as first element of key
@tag_table :mebe2_tags
# Table for storing posts by specific authors
@author_table :mebe2_authors
# Table for storing menu data
@menu_table :mebe2_menu
@spec init() :: :ok
def init() do
# Only create tables if they don't exist already
if :ets.info(@meta_table) == :undefined do
:ets.new(@meta_table, [:named_table, :set, :protected, read_concurrency: true])
:ets.new(@page_table, [:named_table, :set, :protected, read_concurrency: true])
:ets.new(@post_table, [:named_table, :ordered_set, :protected, read_concurrency: true])
:ets.new(@single_post_table, [:named_table, :set, :protected, read_concurrency: true])
:ets.new(@tag_table, [:named_table, :ordered_set, :protected, read_concurrency: true])
:ets.new(@menu_table, [:named_table, :ordered_set, :protected, read_concurrency: true])
if Mebe2.get_conf(:multi_author_mode) do
:ets.new(@author_table, [:named_table, :ordered_set, :protected, read_concurrency: true])
end
end
:ok
end
@spec destroy() :: :ok
def destroy() do
:ets.delete_all_objects(@meta_table)
:ets.delete_all_objects(@page_table)
:ets.delete_all_objects(@post_table)
:ets.delete_all_objects(@single_post_table)
:ets.delete_all_objects(@tag_table)
:ok
end
@spec insert_count(:all, integer) :: true
def insert_count(:all, count) do
insert_meta(:all, :all, count)
end
@spec insert_count(atom, String.t() | integer, integer) :: true
def insert_count(type, key, count) do
insert_meta(type, key, count)
end
@spec insert_menu([{String.t(), String.t()}]) :: true
def insert_menu(menu) do
# Format for ETS because it needs a tuple
menu = Enum.map(menu, fn menuitem -> {menuitem.slug, menuitem} end)
:ets.insert(@menu_table, menu)
end
@spec insert_posts([Models.Post.t()]) :: :ok
def insert_posts(posts) do
ordered_posts =
Enum.map(posts, fn post ->
{{year, month, day}, _} = DateTime.to_erl(post.datetime)
{{year, month, day, post.order}, post}
end)
single_posts =
Enum.map(posts, fn post ->
{{year, month, day}, _} = DateTime.to_erl(post.datetime)
{{year, month, day, post.slug}, post}
end)
:ets.insert(@post_table, ordered_posts)
:ets.insert(@single_post_table, single_posts)
if Mebe2.get_conf(:multi_author_mode) do
author_posts =
Enum.filter(posts, fn post -> Map.has_key?(post.extra_headers, "author") end)
|> Enum.map(fn post ->
{{year, month, day}, _} = DateTime.to_erl(post.datetime)
author_slug = Utils.get_author(post) |> SlugUtils.slugify()
{{author_slug, year, month, day, post.order}, post}
end)
:ets.insert(@author_table, author_posts)
end
:ok
end
@spec insert_page(Models.Page.t()) :: true
def insert_page(page) do
:ets.insert(@page_table, {page.slug, page})
end
@spec insert_tag_posts(%{optional(String.t()) => [Models.Post.t()]}) :: true
def insert_tag_posts(tags) do
tag_posts =
Enum.reduce(Map.keys(tags), [], fn tag, acc ->
Enum.reduce(tags[tag], acc, fn post, inner_acc ->
{{year, month, day}, _} = DateTime.to_erl(post.datetime)
[{{tag, year, month, day, post.order}, post} | inner_acc]
end)
end)
:ets.insert(@tag_table, tag_posts)
end
@spec insert_author_posts(%{optional(String.t()) => [Models.Post.t()]}) :: true
def insert_author_posts(authors) do
author_posts =
Enum.reduce(Map.keys(authors), [], fn author_slug, acc ->
Enum.reduce(authors[author_slug], acc, fn post, inner_acc ->
{{year, month, day}, _} = DateTime.to_erl(post.datetime)
[{{author_slug, year, month, day, post.order}, post} | inner_acc]
end)
end)
:ets.insert(@author_table, author_posts)
end
@spec insert_author_names(%{optional(String.t()) => String.t()}) :: true
def insert_author_names(author_names_map) do
author_names =
Enum.reduce(Map.keys(author_names_map), [], fn author_slug, acc ->
[{{:author_name, author_slug}, author_names_map[author_slug]} | acc]
end)
:ets.insert(@meta_table, author_names)
end
@spec get_menu() :: [Models.MenuItem.t()]
def get_menu() do
case :ets.match(@menu_table, :"$1") do
[] -> []
results -> format_menu(results)
end
end
@spec get_reg_posts(integer(), integer()) :: [Models.Post.t()]
def get_reg_posts(first, last) do
get_post_list(@post_table, [{:"$1", [], [:"$_"]}], first, last)
end
@spec get_tag_posts(String.t(), integer(), integer()) :: [Models.Post.t()]
def get_tag_posts(tag, first, last) do
get_post_list(@tag_table, [{{{tag, :_, :_, :_, :_}, :"$1"}, [], [:"$_"]}], first, last)
end
@spec get_author_posts(String.t(), integer(), integer()) :: [Models.Post.t()]
def get_author_posts(author_slug, first, last) do
get_post_list(
@author_table,
[{{{author_slug, :_, :_, :_, :_}, :"$1"}, [], [:"$_"]}],
first,
last
)
end
@spec get_year_posts(integer(), integer(), integer()) :: [Models.Post.t()]
def get_year_posts(year, first, last) do
get_post_list(@post_table, [{{{year, :_, :_, :_}, :"$1"}, [], [:"$_"]}], first, last)
end
@spec get_month_posts(integer(), integer(), integer(), integer()) :: [Models.Post.t()]
def get_month_posts(year, month, first, last) do
get_post_list(@post_table, [{{{year, month, :_, :_}, :"$1"}, [], [:"$_"]}], first, last)
end
@spec get_page(String.t()) :: Models.Page.t() | nil
def get_page(slug) do
case :ets.match_object(@page_table, {slug, :"$1"}) do
[{_, page}] -> page
_ -> nil
end
end
@spec get_post(integer(), integer(), integer(), String.t()) :: Models.Post.t() | nil
def get_post(year, month, day, slug) do
case :ets.match_object(@single_post_table, {{year, month, day, slug}, :"$1"}) do
[{_, post}] -> post
_ -> nil
end
end
@spec get_count(:all) :: integer()
def get_count(:all) do
get_count(:all, :all)
end
@spec get_count(atom, :all | integer | String.t()) :: integer()
def get_count(type, key) do
get_meta(type, key, 0)
end
@spec get_author_name(String.t()) :: String.t()
def get_author_name(author_slug) do
get_meta(:author_name, author_slug, author_slug)
end
@spec insert_meta(atom, :all | integer | String.t(), integer | String.t()) :: true
defp insert_meta(type, key, value) do
:ets.insert(@meta_table, {{type, key}, value})
end
@spec get_meta(atom, :all | integer | String.t(), integer | String.t()) :: integer | String.t()
defp get_meta(type, key, default) do
case :ets.match_object(@meta_table, {{type, key}, :"$1"}) do
[{{_, _}, value}] -> value
[] -> default
end
end
# Combine error handling of different post listing functions
@spec get_post_list(atom, [tuple], integer, integer) :: [Models.Post.t()]
defp get_post_list(table, matchspec, first, last) do
case :ets.select_reverse(table, matchspec, first + last) do
:"$end_of_table" ->
[]
{result, _} ->
Enum.split(result, first) |> elem(1) |> ets_to_data()
end
end
# Remove key from data returned from ETS
@spec ets_to_data([{any, any}]) :: any
defp ets_to_data(data) do
for {_, actual} <- data, do: actual
end
# Format menu results (convert [{slug, %MenuItem{}}] to %MenuItem{})
@spec format_menu([[{String.t(), Models.MenuItem.t()}]]) :: [Models.MenuItem.t()]
defp format_menu(results) do
for [{_, result}] <- results, do: result
end
end

27
lib/engine/menuparser.ex Normal file

@@ -0,0 +1,27 @@
defmodule Mebe2.Engine.MenuParser do
@moduledoc """
This module handles the parsing of the menu file, which lists the links in the menu bar.
"""
alias Mebe2.Engine.Models.MenuItem
def parse(data_path) do
(data_path <> "/menu")
|> File.read!()
|> split_lines
|> parse_lines
|> Enum.filter(fn item -> item != nil end)
end
defp split_lines(menudata) do
String.split(menudata, ~R/\r?\n/)
end
defp parse_lines(menulines) do
for line <- menulines do
case String.split(line, " ") do
[_] -> nil
[link | rest] -> %MenuItem{slug: link, title: Enum.join(rest, " ")}
end
end
end
end
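
Not part of the commit: a sketch of the menu file this module expects, inferred from the parsing logic above. Each line holds the link slug followed by the title (the rest of the line); lines without a space, including empty lines, are dropped.

```elixir
# Hypothetical contents of "<data_path>/menu":
#
#   archive Blog archive
#   about About me
#
Mebe2.Engine.MenuParser.parse("/path/to/data")
# => [%MenuItem{slug: "archive", title: "Blog archive"},
#     %MenuItem{slug: "about", title: "About me"}]
```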

69
lib/engine/models.ex Normal file

@@ -0,0 +1,69 @@
defmodule Mebe2.Engine.Models do
@moduledoc """
This module contains the data models of the blog engine.
"""
defmodule PageData do
defstruct filename: nil,
title: nil,
headers: [],
content: nil
@type t :: %__MODULE__{
filename: String.t(),
title: String.t(),
headers: [{String.t(), String.t()}],
content: String.t()
}
end
defmodule Post do
defstruct slug: nil,
title: nil,
datetime: nil,
time_given: false,
tags: [],
content: nil,
short_content: nil,
order: 0,
has_more: false,
extra_headers: %{}
@type t :: %__MODULE__{
slug: String.t(),
title: String.t(),
datetime: DateTime.t(),
time_given: boolean,
tags: [String.t()],
content: String.t(),
short_content: String.t(),
order: integer,
has_more: boolean,
extra_headers: %{optional(String.t()) => String.t()}
}
end
defmodule Page do
defstruct slug: nil,
title: nil,
content: nil,
extra_headers: %{}
@type t :: %__MODULE__{
slug: String.t(),
title: String.t(),
content: String.t(),
extra_headers: %{optional(String.t()) => String.t()}
}
end
defmodule MenuItem do
defstruct slug: nil,
title: nil
@type t :: %__MODULE__{
slug: String.t(),
title: String.t()
}
end
end

171
lib/engine/parser.ex Normal file

@@ -0,0 +1,171 @@
defmodule Mebe2.Engine.Parser do
@moduledoc """
This module contains the parser, which parses page data and returns the contents in the correct format.
"""
alias Mebe2.Engine.Models.{PageData, Page, Post}
@time_re ~R/(?<hours>\d\d):(?<minutes>\d\d)(?: (?<timezone>.*))?/
@earmark_opts %Earmark.Options{
code_class_prefix: "language-"
}
def parse(pagedata, filename) do
split_lines(pagedata)
|> parse_raw(%PageData{filename: filename})
|> render_content()
|> format()
end
def split_lines(pagedata) do
String.split(pagedata, ~R/\r?\n/)
end
def parse_raw(datalines, pagedata \\ %PageData{}, mode \\ :title)
def parse_raw([title | rest], pagedata, :title) do
parse_raw(rest, %{pagedata | title: title}, :headers)
end
def parse_raw(["" | rest], pagedata, :headers) do
# Reverse the headers so they appear in the order that they do in the file
headers = Enum.reverse(pagedata.headers)
parse_raw(rest, %{pagedata | headers: headers}, :content)
end
def parse_raw([header | rest], pagedata, :headers) do
headers = [header | pagedata.headers]
parse_raw(rest, %{pagedata | headers: headers}, :headers)
end
def parse_raw(content, pagedata, :content) when is_list(content) do
%{pagedata | content: Enum.join(content, "\n")}
end
def render_content(pagedata) do
%{pagedata | content: Earmark.as_html!(pagedata.content, @earmark_opts)}
end
def format(%PageData{
filename: filename,
title: title,
headers: headers,
content: content
}) do
case Regex.run(~R/^(?:(\d{4})-(\d{2})-(\d{2})(?:-(\d{2}))?-)?(.*?).md$/iu, filename) do
# Pages do not have any date information
[_, "", "", "", "", slug] ->
%Page{
slug: slug,
title: title,
content: content,
extra_headers: parse_headers(headers)
}
[_, year, month, day, order, slug] ->
{tags, extra_headers} = split_tags(headers)
extra_headers = parse_headers(extra_headers)
order = format_order(order)
split_content = String.split(content, ~R/<!--\s*SPLIT\s*-->/u)
date_erl = date_to_int_tuple({year, month, day})
date = Date.from_erl!(date_erl)
time_header = Map.get(extra_headers, "time", nil)
{time, tz} = parse_time(time_header)
datetime = Calendar.DateTime.from_date_and_time_and_zone!(date, time, tz)
%Post{
slug: slug,
title: title,
datetime: datetime,
time_given: time_header != nil,
tags: parse_tags(tags),
content: content,
short_content: hd(split_content),
order: order,
has_more: Enum.count(split_content) > 1,
extra_headers: extra_headers
}
end
end
defp parse_headers(headers) do
# Parse a list of headers into a string keyed map
Enum.reduce(headers, %{}, fn header, acc ->
{key, val} = split_header(header)
Map.put(acc, key, val)
end)
end
defp split_header(header) do
# Enforce 2 parts
[key | [val]] = String.split(header, ":", parts: 2)
{String.trim(key), String.trim(val)}
end
# Split tags from top of headers
defp split_tags([]), do: {"", []}
defp split_tags([tags | headers]), do: {tags, headers}
defp parse_tags(tagline) do
case String.split(tagline, ~R/,\s*/iu) do
[""] -> []
list -> list
end
end
# Parse time data from time header
defp parse_time(nil), do: form_time(nil)
defp parse_time(time_header) when is_binary(time_header) do
with %{"hours" => h, "minutes" => m, "timezone" => tz} <-
Regex.named_captures(@time_re, time_header) do
tz =
if tz == "" do
Mebe2.get_conf(:time_default_tz)
else
tz
end
form_time({str_to_int(h), str_to_int(m)}, tz)
else
_ -> form_time(nil)
end
end
# Form time and timezone from given time parts
def form_time(nil) do
# If not given, time is midnight (RSS feeds require a time)
{Time.from_erl!({0, 0, 0}), Mebe2.get_conf(:time_default_tz)}
end
def form_time({hours, minutes}, tz) do
{Time.from_erl!({hours, minutes, 0}), tz}
end
defp date_to_int_tuple({year, month, day}) do
{
str_to_int(year),
str_to_int(month),
str_to_int(day)
}
end
defp str_to_int("00"), do: 0
defp str_to_int(str) when is_binary(str) do
{int, _} =
String.trim_leading(str, "0")
|> Integer.parse()
int
end
defp format_order(""), do: 0
defp format_order(order), do: str_to_int(order)
end
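
Not part of the commit: a hypothetical post file run through the parser above. Post filenames follow `YYYY-MM-DD[-NN]-slug.md` (pages are plain `slug.md`); the first line is the title, followed by a comma-separated tag line and optional `key: value` headers, a blank line, and Markdown content that may be split with `<!-- SPLIT -->`.

```elixir
# Illustrative only; the filename, headers and content are made up.
raw = """
Hello, world!
elixir, blogging
time: 22:47

Intro paragraph shown in listings.

<!-- SPLIT -->

The rest of the post…
"""

Mebe2.Engine.Parser.parse(raw, "2018-10-15-hello-world.md")
# => %Post{slug: "hello-world", tags: ["elixir", "blogging"], order: 0, ...}
```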

25
lib/engine/slug_utils.ex Normal file

@@ -0,0 +1,25 @@
defmodule Mebe2.Engine.SlugUtils do
@moduledoc """
Utilities related to handling of slugs.
"""
alias Mebe2.Engine.DB
@doc """
Get a slug for the given value.
`nil` is returned as-is.
"""
def slugify(nil), do: nil
def slugify(value) do
Slugger.slugify_downcase(value)
end
@doc """
Get the author name related to this slug from the DB.
"""
def unslugify_author(slug) do
DB.get_author_name(slug)
end
end

44
lib/engine/utils.ex Normal file

@@ -0,0 +1,44 @@
defmodule Mebe2.Engine.Utils do
@moduledoc """
This module contains functions and other stuff that just don't fit anywhere else properly.
"""
alias Mebe2.Engine.Models
@doc """
Get the author of a post.
Returns a value according to the following pseudocode
if multi author mode is on then
if post has author then
return post's author
else if use default author is on then
return blog author
else return nil
else if use default author is on then
return blog author
else return nil
"""
@spec get_author(Models.Post.t()) :: String.t() | nil
def get_author(%Models.Post{extra_headers: extra_headers}) do
multi_author_mode = Mebe2.get_conf(:multi_author_mode)
use_default_author = Mebe2.get_conf(:use_default_author)
blog_author = Mebe2.get_conf(:blog_author)
if multi_author_mode do
cond do
Map.has_key?(extra_headers, "author") ->
Map.get(extra_headers, "author")
use_default_author ->
blog_author
true ->
nil
end
else
if use_default_author, do: blog_author, else: nil
end
end
end
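
Not part of the commit: a quick illustration of the resolution rules in the pseudocode above, assuming `multi_author_mode` and `use_default_author` are both enabled and `blog_author` is the config.exs default.

```elixir
alias Mebe2.Engine.{Models, Utils}

# The post carries its own author header, so that wins:
Utils.get_author(%Models.Post{extra_headers: %{"author" => "Guest Writer"}})
# => "Guest Writer"

# No author header, so the configured blog_author is used as the default:
Utils.get_author(%Models.Post{})
# => "Author McAuthor"
```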

87
lib/engine/worker.ex Normal file

@@ -0,0 +1,87 @@
defmodule Mebe2.Engine.Worker do
@moduledoc """
This worker initializes the post database and keeps it alive while the server is running.
"""
use GenServer
require Logger
alias Mebe2.Engine.{Crawler, DB, MenuParser}
def start_link(opts \\ []) do
GenServer.start_link(__MODULE__, :ok, opts)
end
def init(:ok) do
load_db()
{:ok, nil}
end
def handle_call(:refresh, _from, nil) do
refresh_db()
{:reply, :ok, nil}
end
def refresh_db() do
Logger.info("Destroying database…")
DB.destroy()
Logger.info("Reloading database…")
load_db()
Logger.info("Update done!")
end
@doc """
Initialize the database by crawling the configured path and parsing data to the DB.
"""
def load_db() do
data_path = Mebe2.get_conf(:data_path)
Logger.info("Loading menu from '#{data_path}/menu'…")
menu = MenuParser.parse(data_path)
Logger.info("Loading post database from '#{data_path}'…")
%{
pages: pages,
posts: posts,
tags: tags,
authors: authors,
author_names: author_names,
years: years,
months: months
} = Crawler.crawl(data_path)
Logger.info("Loaded #{Enum.count(pages)} pages and #{Enum.count(posts)} posts.")
DB.init()
DB.insert_menu(menu)
DB.insert_posts(posts)
DB.insert_count(:all, Enum.count(posts))
Enum.each(Map.keys(pages), fn page -> DB.insert_page(pages[page]) end)
DB.insert_tag_posts(tags)
Enum.each(Map.keys(tags), fn tag -> DB.insert_count(:tag, tag, Enum.count(tags[tag])) end)
if Mebe2.get_conf(:multi_author_mode) do
DB.insert_author_posts(authors)
DB.insert_author_names(author_names)
Enum.each(Map.keys(authors), fn author ->
DB.insert_count(:author, author, Enum.count(authors[author]))
end)
end
# For years and months, only insert the counts (the data can be fetched from main posts table)
Enum.each(Map.keys(years), fn year ->
DB.insert_count(:year, year, Enum.count(years[year]))
end)
Enum.each(Map.keys(months), fn month ->
DB.insert_count(:month, month, Enum.count(months[month]))
end)
Logger.info("Posts loaded.")
end
end
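
Not part of the commit: the `:refresh` call handled above can presumably be triggered from an IEx session as follows, using the name the worker is registered under in the application supervisor.

```elixir
# Destroys the ETS tables and re-crawls the data path, then replies :ok.
GenServer.call(Mebe2.Engine.Worker, :refresh)
```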

63
lib/mebe_2.ex Normal file

@@ -0,0 +1,63 @@
defmodule Mebe2 do
@moduledoc """
Documentation for Mebe2.
"""
@conf_datatypes %{
multi_author_mode: :bool,
use_default_author: :bool,
force_read_more: :bool,
enable_feeds: :bool,
feeds_full_content: :bool,
posts_per_page: :int,
posts_in_feed: :int,
disqus_comments: :bool,
page_commenting: :bool,
port: :int,
host_port: :int
}
@doc """
Get a configuration setting.
Gets the setting from env vars (same name as the setting, but in ALL_CAPS), falling back to
Application.get_env. If the setting has a datatype conversion defined above, it will be used to
convert the returned value. Otherwise the return value will be a string (if from an env var) or
any() (if from the config file).
"""
@spec get_conf(atom()) :: any()
def get_conf(key) do
val =
case key |> Atom.to_string() |> String.upcase() |> System.get_env() do
nil -> Application.get_env(:mebe_2, key)
val -> val
end
case Map.get(@conf_datatypes, key) do
nil ->
val
atom ->
fun = "get_#{Atom.to_string(atom)}!" |> String.to_atom()
apply(__MODULE__, fun, [val])
end
end
@doc """
Get a boolean from an env value. The strings "true" and "false", as well as booleans, are
accepted; anything else will raise.
"""
@spec get_bool!(atom() | String.t()) :: boolean()
def get_bool!(val) when is_boolean(val), do: val
def get_bool!("true"), do: true
def get_bool!("false"), do: false
def get_bool!(val), do: raise("'#{inspect(val)}' is invalid value for boolean.")
@doc """
Get an integer from an env value. Integer strings and integers are accepted; anything else will raise.
"""
@spec get_int!(integer() | String.t()) :: integer()
def get_int!(val) when is_integer(val), do: val
def get_int!(val) when is_binary(val), do: String.to_integer(val)
def get_int!(val), do: raise("'#{inspect(val)}' is invalid value for integer.")
end
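
Not part of the commit: a sketch of the lookup order described in the `get_conf/1` docs above; the env var value shown is illustrative.

```elixir
# With PORT=8080 exported, the env var wins and is converted with get_int!/1
# because :port is listed in @conf_datatypes:
Mebe2.get_conf(:port)
# => 8080

# With no BLOG_NAME env var set, the value falls back to config.exs:
Mebe2.get_conf(:blog_name)
# => "My awesome blog"
```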

30
lib/mebe_2/application.ex Normal file

@@ -0,0 +1,30 @@
defmodule Mebe2.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
def start(_type, _args) do
port = Mebe2.get_conf(:port)
case Code.ensure_loaded(ExSync) do
{:module, ExSync = mod} ->
mod.start()
{:error, :nofile} ->
:ok
end
# List all child processes to be supervised
children = [
Mebe2.Engine.Worker.child_spec(name: Mebe2.Engine.Worker),
{Mebe2.Web.Router, [[], [port: port]]}
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Mebe2.Supervisor]
Supervisor.start_link(children, opts)
end
end


@@ -0,0 +1,12 @@
defmodule Mix.Tasks.Mebe.Clean do
use MBU.BuildTask, auto_path: false, create_out_path: false
require Logger
@shortdoc "Clean frontend build artifacts"
task _ do
static_path = File.cwd!() |> Path.join("priv") |> Path.join("static")
Logger.debug("Cleaning path #{static_path}...")
File.rm_rf!(static_path)
end
end


@@ -0,0 +1,19 @@
defmodule Mix.Tasks.Mebe.Serve do
use MBU.BuildTask, auto_path: false, create_out_path: false
import MBU.TaskUtils
@shortdoc "Start Mebe2 server and frontend development tools"
@deps ["mebe.clean"]
task _ do
frontend_path = Path.join([File.cwd!(), "lib", "web", "frontend"])
[
exec(System.find_executable("bsb"), ["-w"], name: "ocaml", cd: frontend_path),
exec(System.find_executable("node"), ["fuse"], name: "fusebox", cd: frontend_path),
exec(System.find_executable("mix"), ["run", "--no-halt"])
]
|> listen(watch: true)
end
end

29
lib/web/frontend/.gitignore vendored Normal file

@@ -0,0 +1,29 @@
*.exe
*.obj
*.out
*.compile
*.native
*.byte
*.cmo
*.annot
*.cmi
*.cmx
*.cmt
*.cmti
*.cma
*.a
*.cmxa
*.obj
*~
*.annot
*.cmj
*.bak
lib/bs
*.mlast
*.mliast
.vscode
.fusebox
.merlin
.bsb.lock
*.bs.js
/node_modules


@@ -0,0 +1,16 @@
# Build
```
npm run build
```
# Watch
```
npm run watch
```
# Editor
If you use `vscode`, press `Windows + Shift + B` and it will build automatically.


@@ -0,0 +1,18 @@
{
"name": "frontend",
"version": "0.1.0",
"sources": {
"dir": "src",
"subdirs": true
},
"package-specs": {
"module": "commonjs",
"in-source": true
},
"suffix": ".bs.js",
"bs-dependencies": [],
"warnings": {
"error": "+101"
},
"refmt": 3
}

43
lib/web/frontend/fuse.js Normal file

@@ -0,0 +1,43 @@
const { FuseBox, QuantumPlugin, SassPlugin, CSSPlugin, CSSResourcePlugin } = require("fuse-box");
const DIST_PATH = '../../../priv/static';
const IS_PRODUCTION = process.env.NODE_ENV === 'production';
const fuse = FuseBox.init({
homeDir: "src",
output: `${DIST_PATH}/$name.js`,
target: "browser@es5",
plugins: [
[
SassPlugin(),
CSSResourcePlugin({ dist: `${DIST_PATH}/css-resources` }),
CSSPlugin(),
],
IS_PRODUCTION && QuantumPlugin({
bakeApiIntoBundle: 'app',
uglify: true,
css: true,
})
]
});
if (!IS_PRODUCTION) {
fuse.dev({
port: 2125,
proxy: {
'/': {
target: 'http://127.0.0.1:2124',
changeOrigin: true
}
}
});
}
const app = fuse.bundle('app').instructions(`> index.bs.js + style/index.scss`);
if (!IS_PRODUCTION) {
app.hmr({ reload: true }).watch();
}
fuse.run();


@@ -0,0 +1,26 @@
{
"name": "frontend",
"version": "0.1.0",
"scripts": {
"clean": "bsb -clean-world",
"build": "bsb -make-world",
"watch": "bsb -make-world -w"
},
"keywords": [
"BuckleScript"
],
"author": "",
"license": "MIT",
"devDependencies": {
"bs-platform": "^4.0.5",
"fuse-box": "^3.5.0",
"http-proxy-middleware": "^0.19.0",
"node-sass": "^4.9.3",
"typescript": "^3.0.3",
"uglify-js": "^3.4.9"
},
"dependencies": {
"@csstools/normalize.css": "^9.0.1",
"prismjs": "^1.15.0"
}
}


@@ -0,0 +1,7 @@
(* Code block highlighting in blog, with PrismJS. *)
[%raw {|
require('prismjs'),
require('prismjs/components/prism-elixir'),
require('prismjs/components/prism-bash'),
require('prismjs/components/prism-c')
|}]


@@ -0,0 +1,93 @@
Copyright (c) 2012-2015, The Mozilla Foundation and Telefonica S.A.
This Font Software is licensed under the SIL Open Font License, Version 1.1.
This license is copied below, and is also available with a FAQ at:
http://scripts.sil.org/OFL
-----------------------------------------------------------
SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007
-----------------------------------------------------------
PREAMBLE
The goals of the Open Font License (OFL) are to stimulate worldwide
development of collaborative font projects, to support the font creation
efforts of academic and linguistic communities, and to provide a free and
open framework in which fonts may be shared and improved in partnership
with others.
The OFL allows the licensed fonts to be used, studied, modified and
redistributed freely as long as they are not sold by themselves. The
fonts, including any derivative works, can be bundled, embedded,
redistributed and/or sold with any software provided that any reserved
names are not used by derivative works. The fonts and derivatives,
however, cannot be released under any other type of license. The
requirement for fonts to remain under this license does not apply
to any document created using the fonts or their derivatives.
DEFINITIONS
"Font Software" refers to the set of files released by the Copyright
Holder(s) under this license and clearly marked as such. This may
include source files, build scripts and documentation.
"Reserved Font Name" refers to any names specified as such after the
copyright statement(s).
"Original Version" refers to the collection of Font Software components as
distributed by the Copyright Holder(s).
"Modified Version" refers to any derivative made by adding to, deleting,
or substituting -- in part or in whole -- any of the components of the
Original Version, by changing formats or by porting the Font Software to a
new environment.
"Author" refers to any designer, engineer, programmer, technical
writer or other person who contributed to the Font Software.
PERMISSION & CONDITIONS
Permission is hereby granted, free of charge, to any person obtaining
a copy of the Font Software, to use, study, copy, merge, embed, modify,
redistribute, and sell modified and unmodified copies of the Font
Software, subject to the following conditions:
1) Neither the Font Software nor any of its individual components,
in Original or Modified Versions, may be sold by itself.
2) Original or Modified Versions of the Font Software may be bundled,
redistributed and/or sold with any software, provided that each copy
contains the above copyright notice and this license. These can be
included either as stand-alone text files, human-readable headers or
in the appropriate machine-readable metadata fields within text or
binary files as long as those fields can be easily viewed by the user.
3) No Modified Version of the Font Software may use the Reserved Font
Name(s) unless explicit written permission is granted by the corresponding
Copyright Holder. This restriction only applies to the primary font name as
presented to the users.
4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font
Software shall not be used to promote, endorse or advertise any
Modified Version, except to acknowledge the contribution(s) of the
Copyright Holder(s) and the Author(s) or with their explicit written
permission.
5) The Font Software, modified or unmodified, in part or in whole,
must be distributed entirely under this license, and must not be
distributed under any other license. The requirement for fonts to
remain under this license does not apply to any document created
using the Font Software.
TERMINATION
This license becomes null and void if any of the above conditions are
not met.
DISCLAIMER
THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE
COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM
OTHER DEALINGS IN THE FONT SOFTWARE.

Binary files not shown.


@@ -0,0 +1,93 @@
Copyright (c) 2010-2014 by tyPoland Lukasz Dziedzic (team@latofonts.com) with Reserved Font Name "Lato"
This Font Software is licensed under the SIL Open Font License, Version 1.1.
This license is copied below, and is also available with a FAQ at:
http://scripts.sil.org/OFL
-----------------------------------------------------------
SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007
-----------------------------------------------------------
PREAMBLE
The goals of the Open Font License (OFL) are to stimulate worldwide
development of collaborative font projects, to support the font creation
efforts of academic and linguistic communities, and to provide a free and
open framework in which fonts may be shared and improved in partnership
with others.
The OFL allows the licensed fonts to be used, studied, modified and
redistributed freely as long as they are not sold by themselves. The
fonts, including any derivative works, can be bundled, embedded,
redistributed and/or sold with any software provided that any reserved
names are not used by derivative works. The fonts and derivatives,
however, cannot be released under any other type of license. The
requirement for fonts to remain under this license does not apply
to any document created using the fonts or their derivatives.
DEFINITIONS
"Font Software" refers to the set of files released by the Copyright
Holder(s) under this license and clearly marked as such. This may
include source files, build scripts and documentation.
"Reserved Font Name" refers to any names specified as such after the
copyright statement(s).
"Original Version" refers to the collection of Font Software components as
distributed by the Copyright Holder(s).
"Modified Version" refers to any derivative made by adding to, deleting,
or substituting -- in part or in whole -- any of the components of the
Original Version, by changing formats or by porting the Font Software to a
new environment.
"Author" refers to any designer, engineer, programmer, technical
writer or other person who contributed to the Font Software.
PERMISSION & CONDITIONS
Permission is hereby granted, free of charge, to any person obtaining
a copy of the Font Software, to use, study, copy, merge, embed, modify,
redistribute, and sell modified and unmodified copies of the Font
Software, subject to the following conditions:
1) Neither the Font Software nor any of its individual components,
in Original or Modified Versions, may be sold by itself.
2) Original or Modified Versions of the Font Software may be bundled,
redistributed and/or sold with any software, provided that each copy
contains the above copyright notice and this license. These can be
included either as stand-alone text files, human-readable headers or
in the appropriate machine-readable metadata fields within text or
binary files as long as those fields can be easily viewed by the user.
3) No Modified Version of the Font Software may use the Reserved Font
Name(s) unless explicit written permission is granted by the corresponding
Copyright Holder. This restriction only applies to the primary font name as
presented to the users.
4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font
Software shall not be used to promote, endorse or advertise any
Modified Version, except to acknowledge the contribution(s) of the
Copyright Holder(s) and the Author(s) or with their explicit written
permission.
5) The Font Software, modified or unmodified, in part or in whole,
must be distributed entirely under this license, and must not be
distributed under any other license. The requirement for fonts to
remain under this license does not apply to any document created
using the Font Software.
TERMINATION
This license becomes null and void if any of the above conditions are
not met.
DISCLAIMER
THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE
COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL
DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM
OTHER DEALINGS IN THE FONT SOFTWARE.

Binary files not shown.

Some files were not shown because too many files have changed in this diff.