Häxfest 2015!!!!!
This commit is contained in:
parent
8d0806f6f0
commit
559a063c3d
42 changed files with 732 additions and 987 deletions
19
README.md
19
README.md
|
@ -1,8 +1,19 @@
|
||||||
# Katso
|
# Katso
|
||||||
|
|
||||||
To start your new Phoenix application:
|
To start your Phoenix app:
|
||||||
|
|
||||||
1. Install dependencies with `mix deps.get`
|
1. Install dependencies with `mix deps.get`
|
||||||
2. Start Phoenix endpoint with `mix phoenix.server`
|
2. Create and migrate your database with `mix ecto.create && mix ecto.migrate`
|
||||||
|
3. Start Phoenix endpoint with `mix phoenix.server`
|
||||||
|
|
||||||
Now you can visit `localhost:4000` from your browser.
|
Now you can visit [`localhost:4000`](http://localhost:4000) from your browser.
|
||||||
|
|
||||||
|
Ready to run in production? Please [check our deployment guides](http://www.phoenixframework.org/docs/deployment).
|
||||||
|
|
||||||
|
## Learn more
|
||||||
|
|
||||||
|
* Official website: http://www.phoenixframework.org/
|
||||||
|
* Guides: http://phoenixframework.org/docs/overview
|
||||||
|
* Docs: http://hexdocs.pm/phoenix
|
||||||
|
* Mailing list: http://groups.google.com/group/phoenix-talk
|
||||||
|
* Source: https://github.com/phoenixframework/phoenix
|
||||||
|
|
|
@ -2,28 +2,48 @@ exports.config = {
|
||||||
// See http://brunch.io/#documentation for docs.
|
// See http://brunch.io/#documentation for docs.
|
||||||
files: {
|
files: {
|
||||||
javascripts: {
|
javascripts: {
|
||||||
joinTo: 'js/app.js'
|
// To use a separate vendor.js bundle, specify two files path
|
||||||
// To change the order of concatenation of files, explictly mention here
|
// https://github.com/brunch/brunch/blob/stable/docs/config.md#files
|
||||||
// https://github.com/brunch/brunch/tree/stable/docs#concatenation
|
joinTo: {
|
||||||
|
"js/app.js": /^(web\/static\/js\/app.js)$/,
|
||||||
|
"js/jquery.js": /^(node_modules\/jquery\/dist\/jquery\.js)$/,
|
||||||
|
"js/dygraph.js": /^(node_modules\/dygraphs\/dygraph-combined-dev\.js)$/,
|
||||||
|
"js/vendor.js": /^(web\/static\/vendor)/
|
||||||
|
}
|
||||||
|
//
|
||||||
|
// To change the order of concatenation of files, explicitly mention here
|
||||||
|
// https://github.com/brunch/brunch/tree/master/docs#concatenation
|
||||||
// order: {
|
// order: {
|
||||||
// before: [
|
// before: [
|
||||||
// 'web/static/vendor/js/jquery-2.1.1.js',
|
// "web/static/vendor/js/jquery-2.1.1.js",
|
||||||
// 'web/static/vendor/js/bootstrap.min.js'
|
// "web/static/vendor/js/bootstrap.min.js"
|
||||||
// ]
|
// ]
|
||||||
// }
|
// }
|
||||||
},
|
},
|
||||||
stylesheets: {
|
stylesheets: {
|
||||||
joinTo: 'css/app.css'
|
joinTo: "css/app.css"
|
||||||
},
|
},
|
||||||
templates: {
|
templates: {
|
||||||
joinTo: 'js/app.js'
|
joinTo: "js/app.js"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
|
||||||
|
conventions: {
|
||||||
|
// This option sets where we should place non-css and non-js assets in.
|
||||||
|
// By default, we set this to "/web/static/assets". Files in this directory
|
||||||
|
// will be copied to `paths.public`, which is "priv/static" by default.
|
||||||
|
assets: /^(web\/static\/assets)/
|
||||||
|
},
|
||||||
|
|
||||||
// Phoenix paths configuration
|
// Phoenix paths configuration
|
||||||
paths: {
|
paths: {
|
||||||
// Which directories to watch
|
// Dependencies and current project directories to watch
|
||||||
watched: ["web/static", "test/static"],
|
watched: [
|
||||||
|
"deps/phoenix/web/static",
|
||||||
|
"deps/phoenix_html/web/static",
|
||||||
|
"web/static",
|
||||||
|
"test/static"
|
||||||
|
],
|
||||||
|
|
||||||
// Where to compile files to
|
// Where to compile files to
|
||||||
public: "priv/static"
|
public: "priv/static"
|
||||||
|
@ -31,9 +51,19 @@ exports.config = {
|
||||||
|
|
||||||
// Configure your plugins
|
// Configure your plugins
|
||||||
plugins: {
|
plugins: {
|
||||||
ES6to5: {
|
babel: {
|
||||||
// Do not use ES6 compiler in vendor code
|
// Do not use ES6 compiler in vendor code
|
||||||
ignore: [/^(web\/static\/vendor)/]
|
ignore: [/web\/static\/vendor/]
|
||||||
}
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
modules: {
|
||||||
|
autoRequire: {
|
||||||
|
"js/app.js": ["web/static/js/app"]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
npm: {
|
||||||
|
enabled: true
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
|
@ -8,9 +8,9 @@ use Mix.Config
|
||||||
# Configures the endpoint
|
# Configures the endpoint
|
||||||
config :katso, Katso.Endpoint,
|
config :katso, Katso.Endpoint,
|
||||||
url: [host: "localhost"],
|
url: [host: "localhost"],
|
||||||
root: Path.expand("..", __DIR__),
|
root: Path.dirname(__DIR__),
|
||||||
secret_key_base: "AwltP5KYMmwNj+7/UP47rHeKMX16cxP7uv0Csr+PoIZEFN2o090mzVvLbcvJeld1",
|
secret_key_base: "ikWnS0ccKDnDW56giwdyvAS873ofbegfEAuSbDKCvFzBa9Kz12/lSAPUiCqzC5Ec",
|
||||||
debug_errors: false,
|
render_errors: [accepts: ~w(html json)],
|
||||||
pubsub: [name: Katso.PubSub,
|
pubsub: [name: Katso.PubSub,
|
||||||
adapter: Phoenix.PubSub.PG2]
|
adapter: Phoenix.PubSub.PG2]
|
||||||
|
|
||||||
|
@ -22,3 +22,8 @@ config :logger, :console,
|
||||||
# Import environment specific config. This must remain at the bottom
|
# Import environment specific config. This must remain at the bottom
|
||||||
# of this file so it overrides the configuration defined above.
|
# of this file so it overrides the configuration defined above.
|
||||||
import_config "#{Mix.env}.exs"
|
import_config "#{Mix.env}.exs"
|
||||||
|
|
||||||
|
# Configure phoenix generators
|
||||||
|
config :phoenix, :generators,
|
||||||
|
migration: true,
|
||||||
|
binary_id: false
|
||||||
|
|
|
@ -11,13 +11,14 @@ config :katso, Katso.Endpoint,
|
||||||
debug_errors: true,
|
debug_errors: true,
|
||||||
code_reloader: true,
|
code_reloader: true,
|
||||||
cache_static_lookup: false,
|
cache_static_lookup: false,
|
||||||
watchers: [node: ["node_modules/brunch/bin/brunch", "watch"]]
|
check_origin: false,
|
||||||
|
watchers: [node: ["node_modules/brunch/bin/brunch", "watch", "--stdin"]]
|
||||||
|
|
||||||
# Watch static and templates for browser reloading.
|
# Watch static and templates for browser reloading.
|
||||||
config :katso, Katso.Endpoint,
|
config :katso, Katso.Endpoint,
|
||||||
live_reload: [
|
live_reload: [
|
||||||
patterns: [
|
patterns: [
|
||||||
~r{priv/static/.*(js|css|png|jpeg|jpg|gif)$},
|
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
|
||||||
~r{web/views/.*(ex)$},
|
~r{web/views/.*(ex)$},
|
||||||
~r{web/templates/.*(eex)$}
|
~r{web/templates/.*(eex)$}
|
||||||
]
|
]
|
||||||
|
@ -26,10 +27,16 @@ config :katso, Katso.Endpoint,
|
||||||
# Do not include metadata nor timestamps in development logs
|
# Do not include metadata nor timestamps in development logs
|
||||||
config :logger, :console, format: "[$level] $message\n"
|
config :logger, :console, format: "[$level] $message\n"
|
||||||
|
|
||||||
|
# Set a higher stacktrace during development.
|
||||||
|
# Do not configure such in production as keeping
|
||||||
|
# and calculating stacktraces is usually expensive.
|
||||||
|
config :phoenix, :stacktrace_depth, 20
|
||||||
|
|
||||||
# Configure your database
|
# Configure your database
|
||||||
config :katso, Katso.Repo,
|
config :katso, Katso.Repo,
|
||||||
adapter: Ecto.Adapters.Postgres,
|
adapter: Ecto.Adapters.Postgres,
|
||||||
username: "katso",
|
username: "postgres",
|
||||||
password: "katso",
|
password: "postgres",
|
||||||
database: "katso",
|
database: "katso_dev",
|
||||||
hostname: "localhost"
|
hostname: "localhost",
|
||||||
|
pool_size: 10
|
||||||
|
|
|
@ -6,26 +6,42 @@ use Mix.Config
|
||||||
#
|
#
|
||||||
# You should also configure the url host to something
|
# You should also configure the url host to something
|
||||||
# meaningful, we use this information when generating URLs.
|
# meaningful, we use this information when generating URLs.
|
||||||
|
#
|
||||||
|
# Finally, we also include the path to a manifest
|
||||||
|
# containing the digested version of static files. This
|
||||||
|
# manifest is generated by the mix phoenix.digest task
|
||||||
|
# which you typically run after static files are built.
|
||||||
config :katso, Katso.Endpoint,
|
config :katso, Katso.Endpoint,
|
||||||
http: [port: {:system, "PORT"}],
|
http: [port: {:system, "PORT"}],
|
||||||
url: [host: "example.com"]
|
url: [host: "example.com", port: 80],
|
||||||
|
cache_static_manifest: "priv/static/manifest.json"
|
||||||
|
|
||||||
|
# Do not print debug messages in production
|
||||||
|
config :logger, level: :info
|
||||||
|
|
||||||
# ## SSL Support
|
# ## SSL Support
|
||||||
#
|
#
|
||||||
# To get SSL working, you will need to add the `https` key
|
# To get SSL working, you will need to add the `https` key
|
||||||
# to the previous section:
|
# to the previous section and set your `:url` port to 443:
|
||||||
#
|
#
|
||||||
# config:katso, Katso.Endpoint,
|
# config :katso, Katso.Endpoint,
|
||||||
# ...
|
# ...
|
||||||
|
# url: [host: "example.com", port: 443],
|
||||||
# https: [port: 443,
|
# https: [port: 443,
|
||||||
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
|
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
|
||||||
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
|
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
|
||||||
#
|
#
|
||||||
# Where those two env variables point to a file on
|
# Where those two env variables return an absolute path to
|
||||||
# disk for the key and cert.
|
# the key and cert in disk or a relative path inside priv,
|
||||||
|
# for example "priv/ssl/server.key".
|
||||||
# Do not print debug messages in production
|
#
|
||||||
config :logger, level: :info
|
# We also recommend setting `force_ssl`, ensuring no data is
|
||||||
|
# ever sent via http, always redirecting to https:
|
||||||
|
#
|
||||||
|
# config :katso, Katso.Endpoint,
|
||||||
|
# force_ssl: [hsts: true]
|
||||||
|
#
|
||||||
|
# Check `Plug.SSL` for all available options in `force_ssl`.
|
||||||
|
|
||||||
# ## Using releases
|
# ## Using releases
|
||||||
#
|
#
|
||||||
|
|
|
@ -4,11 +4,12 @@ use Mix.Config
|
||||||
# you likely want to automate and keep it away from
|
# you likely want to automate and keep it away from
|
||||||
# your version control system.
|
# your version control system.
|
||||||
config :katso, Katso.Endpoint,
|
config :katso, Katso.Endpoint,
|
||||||
secret_key_base: "Pqf8JsBMaVsywaKVbsJFtsPPySK94cWLrvEgwENe37SBW5EiDO4J3F7AaZ5luBCY"
|
secret_key_base: "vPhy4euw5ZRCoD6Oh9apXEuvyGSxY8v6mXQOummG970OnVYxFfPmQ50uHAYIqErk"
|
||||||
|
|
||||||
# Configure your database
|
# Configure your database
|
||||||
config :katso, Katso.Repo,
|
config :katso, Katso.Repo,
|
||||||
adapter: Ecto.Adapters.Postgres,
|
adapter: Ecto.Adapters.Postgres,
|
||||||
username: "postgres",
|
username: "postgres",
|
||||||
password: "postgres",
|
password: "postgres",
|
||||||
database: "katso_prod"
|
database: "katso_prod",
|
||||||
|
pool_size: 20
|
||||||
|
|
|
@ -15,5 +15,5 @@ config :katso, Katso.Repo,
|
||||||
username: "postgres",
|
username: "postgres",
|
||||||
password: "postgres",
|
password: "postgres",
|
||||||
database: "katso_test",
|
database: "katso_test",
|
||||||
size: 1,
|
hostname: "localhost",
|
||||||
max_overflow: false
|
pool: Ecto.Adapters.SQL.Sandbox
|
||||||
|
|
|
@ -1,18 +1,25 @@
|
||||||
defmodule Katso.Endpoint do
|
defmodule Katso.Endpoint do
|
||||||
use Phoenix.Endpoint, otp_app: :katso
|
use Phoenix.Endpoint, otp_app: :katso
|
||||||
|
|
||||||
# Serve at "/" the given assets from "priv/static" directory
|
socket "/socket", Katso.UserSocket
|
||||||
|
|
||||||
|
# Serve at "/" the static files from "priv/static" directory.
|
||||||
|
#
|
||||||
|
# You should set gzip to true if you are running phoenix.digest
|
||||||
|
# when deploying your static files in production.
|
||||||
plug Plug.Static,
|
plug Plug.Static,
|
||||||
at: "/", from: :katso,
|
at: "/", from: :katso, gzip: false,
|
||||||
only: ~w(css images js favicon.ico robots.txt)
|
only: ~w(css fonts images js favicon.ico robots.txt)
|
||||||
|
|
||||||
# Code reloading can be explicitly enabled under the
|
# Code reloading can be explicitly enabled under the
|
||||||
# :code_reloader configuration of your endpoint.
|
# :code_reloader configuration of your endpoint.
|
||||||
if code_reloading? do
|
if code_reloading? do
|
||||||
|
socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
|
||||||
plug Phoenix.LiveReloader
|
plug Phoenix.LiveReloader
|
||||||
plug Phoenix.CodeReloader
|
plug Phoenix.CodeReloader
|
||||||
end
|
end
|
||||||
|
|
||||||
|
plug Plug.RequestId
|
||||||
plug Plug.Logger
|
plug Plug.Logger
|
||||||
|
|
||||||
plug Plug.Parsers,
|
plug Plug.Parsers,
|
||||||
|
@ -26,8 +33,7 @@ defmodule Katso.Endpoint do
|
||||||
plug Plug.Session,
|
plug Plug.Session,
|
||||||
store: :cookie,
|
store: :cookie,
|
||||||
key: "_katso_key",
|
key: "_katso_key",
|
||||||
signing_salt: "hybxwdCF",
|
signing_salt: "YAd3yiv6"
|
||||||
encryption_salt: "rGum4O3j"
|
|
||||||
|
|
||||||
plug :router, Katso.Router
|
plug Katso.Router
|
||||||
end
|
end
|
||||||
|
|
|
@ -10,6 +10,7 @@ defmodule Katso.PageAnalyzer do
|
||||||
alias Katso.FetchScore
|
alias Katso.FetchScore
|
||||||
alias Katso.Title
|
alias Katso.Title
|
||||||
alias Katso.TitleScore
|
alias Katso.TitleScore
|
||||||
|
alias Katso.Series
|
||||||
|
|
||||||
import Ecto.Query, only: [from: 2]
|
import Ecto.Query, only: [from: 2]
|
||||||
|
|
||||||
|
@ -18,7 +19,8 @@ defmodule Katso.PageAnalyzer do
|
||||||
name: "Iltalehti",
|
name: "Iltalehti",
|
||||||
url: "http://www.iltalehti.fi/",
|
url: "http://www.iltalehti.fi/",
|
||||||
rules: [
|
rules: [
|
||||||
".otsikko"
|
".df-blk",
|
||||||
|
"p.even a"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
|
||||||
|
@ -319,6 +321,8 @@ defmodule Katso.PageAnalyzer do
|
||||||
end
|
end
|
||||||
|
|
||||||
def store_data(data) do
|
def store_data(data) do
|
||||||
|
series = create_series
|
||||||
|
|
||||||
Enum.each data, fn {site_key, scores} ->
|
Enum.each data, fn {site_key, scores} ->
|
||||||
query = from m in Magazine,
|
query = from m in Magazine,
|
||||||
where: m.key == ^(Atom.to_string site_key)
|
where: m.key == ^(Atom.to_string site_key)
|
||||||
|
@ -328,7 +332,7 @@ defmodule Katso.PageAnalyzer do
|
||||||
m -> m
|
m -> m
|
||||||
end
|
end
|
||||||
|
|
||||||
fetch = create_fetch magazine, scores
|
fetch = create_fetch series, magazine, scores
|
||||||
|
|
||||||
Enum.each scores.score_types, fn score_type ->
|
Enum.each scores.score_types, fn score_type ->
|
||||||
create_fetch_score fetch, score_type
|
create_fetch_score fetch, score_type
|
||||||
|
@ -336,7 +340,11 @@ defmodule Katso.PageAnalyzer do
|
||||||
|
|
||||||
Enum.reject(scores.matches, fn {_, score_types} -> score_types == [] end)
|
Enum.reject(scores.matches, fn {_, score_types} -> score_types == [] end)
|
||||||
|> Enum.each fn {match, score_types} ->
|
|> Enum.each fn {match, score_types} ->
|
||||||
title = create_title fetch, {match, score_types}
|
sum = Enum.reduce score_types, 0, fn {_, _, score_amount}, acc ->
|
||||||
|
acc + score_amount
|
||||||
|
end
|
||||||
|
|
||||||
|
title = create_title fetch, match, sum
|
||||||
|
|
||||||
Enum.each score_types, fn score_type ->
|
Enum.each score_types, fn score_type ->
|
||||||
create_title_score title, score_type
|
create_title_score title, score_type
|
||||||
|
@ -345,43 +353,73 @@ defmodule Katso.PageAnalyzer do
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
defp create_magazine(site, site_key) do
|
defp create_series() do
|
||||||
Repo.insert Magazine.changeset %Magazine{}, %{
|
%Series{}
|
||||||
name: site.name,
|
|> Series.changeset(%{})
|
||||||
key: Atom.to_string(site_key)
|
|> Repo.insert
|
||||||
}
|
|> ok_or_die
|
||||||
end
|
end
|
||||||
|
|
||||||
defp create_fetch(magazine, scores) do
|
defp create_magazine(site, site_key) do
|
||||||
Repo.insert Fetch.changeset %Fetch{}, %{
|
%Magazine{}
|
||||||
|
|> Magazine.changeset(%{
|
||||||
|
name: site.name,
|
||||||
|
key: Atom.to_string(site_key)
|
||||||
|
})
|
||||||
|
|> Repo.insert
|
||||||
|
|> ok_or_die
|
||||||
|
end
|
||||||
|
|
||||||
|
defp create_fetch(series, magazine, scores) do
|
||||||
|
%Fetch{}
|
||||||
|
|> Fetch.changeset(%{
|
||||||
|
series_id: series.id,
|
||||||
total_score: scores.total_score,
|
total_score: scores.total_score,
|
||||||
total_titles: scores.total_titles,
|
total_titles: scores.total_titles,
|
||||||
relative_score: scores.relative_score,
|
relative_score: scores.relative_score,
|
||||||
magazine_id: magazine.id
|
magazine_id: magazine.id
|
||||||
}
|
})
|
||||||
|
|> Repo.insert
|
||||||
|
|> ok_or_die
|
||||||
end
|
end
|
||||||
|
|
||||||
defp create_title(fetch, {title, _}) do
|
defp create_title(fetch, title, total_score) do
|
||||||
Repo.insert Title.changeset %Title{}, %{
|
%Title{}
|
||||||
|
|> Title.changeset(%{
|
||||||
title: title,
|
title: title,
|
||||||
fetch_id: fetch.id
|
fetch_id: fetch.id,
|
||||||
}
|
total_score: total_score
|
||||||
|
})
|
||||||
|
|> Repo.insert
|
||||||
|
|> ok_or_die
|
||||||
end
|
end
|
||||||
|
|
||||||
defp create_title_score(title, {score_type, score_words, score_amount}) do
|
defp create_title_score(title, {score_type, score_words, score_amount}) do
|
||||||
Repo.insert TitleScore.changeset %TitleScore{}, %{
|
%TitleScore{}
|
||||||
|
|> TitleScore.changeset(%{
|
||||||
score_type: Atom.to_string(score_type),
|
score_type: Atom.to_string(score_type),
|
||||||
score_words: score_words,
|
score_words: score_words,
|
||||||
score_amount: score_amount,
|
score_amount: score_amount,
|
||||||
title_id: title.id
|
title_id: title.id
|
||||||
}
|
})
|
||||||
|
|> Repo.insert
|
||||||
|
|> ok_or_die
|
||||||
end
|
end
|
||||||
|
|
||||||
defp create_fetch_score(fetch, {score_type, score_amount}) do
|
defp create_fetch_score(fetch, {score_type, score_amount}) do
|
||||||
Repo.insert FetchScore.changeset %FetchScore{}, %{
|
%FetchScore{}
|
||||||
|
|> FetchScore.changeset(%{
|
||||||
score_type: Atom.to_string(score_type),
|
score_type: Atom.to_string(score_type),
|
||||||
score_amount: score_amount,
|
score_amount: score_amount,
|
||||||
fetch_id: fetch.id
|
fetch_id: fetch.id
|
||||||
}
|
})
|
||||||
|
|> Repo.insert
|
||||||
|
|> ok_or_die
|
||||||
|
end
|
||||||
|
|
||||||
|
defp ok_or_die({:ok, result}), do: result
|
||||||
|
|
||||||
|
defp ok_or_die({:error, err}) do
|
||||||
|
raise err
|
||||||
end
|
end
|
||||||
end
|
end
|
|
@ -11,13 +11,13 @@ defmodule Katso.TitleAnalyzer do
|
||||||
vau: %{
|
vau: %{
|
||||||
s: "Vau! Oho! Ja kaikenlainen muu ihmettely.",
|
s: "Vau! Oho! Ja kaikenlainen muu ihmettely.",
|
||||||
r: ~r/\b(?:vau|oho|ohh?oh?|hups(?:is)?|huh)\b/iu,
|
r: ~r/\b(?:vau|oho|ohh?oh?|hups(?:is)?|huh)\b/iu,
|
||||||
p: 1
|
p: 5
|
||||||
},
|
},
|
||||||
|
|
||||||
nyt: %{
|
nyt: %{
|
||||||
s: "Nyt puhuu X! Nyt se on X!",
|
s: "Nyt puhuu X! Nyt se on X!",
|
||||||
r: ~r/\b(?:nyt se on|nyt puhu[uv])\b/iu,
|
r: ~r/\b(?:nyt se on|nyt puhu[uv])\b/iu,
|
||||||
p: 1
|
p: 3
|
||||||
},
|
},
|
||||||
|
|
||||||
pronominit: %{
|
pronominit: %{
|
||||||
|
@ -28,16 +28,17 @@ defmodule Katso.TitleAnalyzer do
|
||||||
tämänlaiset|tämänlaisia|tämänlaista|
|
tämänlaiset|tämänlaisia|tämänlaista|
|
||||||
nämä|näissä|näillä|näistä|näihin|näille|näiden|näiltä|näitä|näin|
|
nämä|näissä|näillä|näistä|näihin|näille|näiden|näiltä|näitä|näin|
|
||||||
hän|hänessä|hänellä|hänestä|häneen|hänelle|hänen|häneltä|häntä|
|
hän|hänessä|hänellä|hänestä|häneen|hänelle|hänen|häneltä|häntä|
|
||||||
he|heissä|heillä|heistä|heihin|heille|heidän|heiltä|heitä|heistä
|
he|heissä|heillä|heistä|heihin|heille|heidän|heiltä|heitä|heistä|
|
||||||
|
ne|niissä|niillä|niistä|niihin|niille|niiden|niiltä|niitä|niistä
|
||||||
)(?:kin)?\b
|
)(?:kin)?\b
|
||||||
/ixu,
|
/iux,
|
||||||
p: 1
|
p: 3
|
||||||
},
|
},
|
||||||
|
|
||||||
kysymys: %{
|
kysymys: %{
|
||||||
s: "Kysymykset otsikoissa. Yleensä näihin vastaus on ”ei”.",
|
s: "Kysymykset otsikoissa. Yleensä näihin vastaus on ”ei”.",
|
||||||
r: ~r/\?/u,
|
r: ~r/\?/,
|
||||||
p: 1
|
p: 2
|
||||||
},
|
},
|
||||||
|
|
||||||
huuto: %{
|
huuto: %{
|
||||||
|
@ -69,38 +70,42 @@ defmodule Katso.TitleAnalyzer do
|
||||||
|vihdoin
|
|vihdoin
|
||||||
|avautu
|
|avautu
|
||||||
|tilit(?:y|t)
|
|tilit(?:y|t)
|
||||||
|hyytävi?ä
|
|hyytäv
|
||||||
|jäätävi?ä
|
|jäätäv
|
||||||
|et usko
|
|et usko
|
||||||
|kansa\b
|
|kansa\b
|
||||||
|testaa\b
|
|testaa\b
|
||||||
|arvaa
|
|arvaa
|
||||||
|keksi(?:\b|tkö)
|
|
||||||
|erikoi(?:s|n)
|
|erikoi(?:s|n)
|
||||||
|nolo(?:\b|a|i)
|
|nolo(?:\b|a|i)
|
||||||
|sensaatio
|
|sensaatio
|
||||||
|omitui
|
|omitui
|
||||||
|
|(?:päätä)?huim
|
||||||
)
|
)
|
||||||
/iux,
|
/iux,
|
||||||
p: 1
|
p: 3
|
||||||
},
|
},
|
||||||
|
|
||||||
katso: %{
|
katso: %{
|
||||||
s: "Kehotus katsomaan jotain lisäsisältöä, joka lähes poikkeuksetta on hyödytöntä.",
|
s: "Kehotus katsomaan jotain lisäsisältöä, joka lähes poikkeuksetta on hyödytöntä.",
|
||||||
r: ~r/\bkatso\b/u,
|
r: ~r/\bkatso\b/iu,
|
||||||
p: 1
|
p: 5
|
||||||
},
|
},
|
||||||
|
|
||||||
some: %{
|
some: %{
|
||||||
s: "Sosiaalinen media on turhaa hömpötystä.",
|
s: "Sosiaalinen media on turhaa hömpötystä.",
|
||||||
r: ~r/\bsome|twiitt|peukut(?:u|t)/,
|
r: ~r/\bsome|twii?tt|peukut(?:u|t)|facebook|insta(?:gram)?|pinterest/iu,
|
||||||
p: 1
|
p: 1
|
||||||
|
},
|
||||||
|
|
||||||
|
arvaatko: %{
|
||||||
|
s: "Arvaatko? Uskaltaisitko? Viitsisitkö? jne.",
|
||||||
|
r: ~r/tko\b|tkö\b/iu,
|
||||||
|
p: 2
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
def analyze(str) do
|
def analyze(str) do
|
||||||
str = convert_utf8 str
|
|
||||||
|
|
||||||
Map.keys(@rules)
|
Map.keys(@rules)
|
||||||
|> Enum.map(fn key ->
|
|> Enum.map(fn key ->
|
||||||
rule = @rules[key]
|
rule = @rules[key]
|
||||||
|
@ -119,18 +124,4 @@ defmodule Katso.TitleAnalyzer do
|
||||||
matches -> Enum.map matches, fn match -> Enum.at match, 0 end
|
matches -> Enum.map matches, fn match -> Enum.at match, 0 end
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
def convert_utf8(str) do
|
|
||||||
case String.valid? str do
|
|
||||||
true -> str
|
|
||||||
false ->
|
|
||||||
String.codepoints(str)
|
|
||||||
|> Enum.reduce "", fn codepoint, acc ->
|
|
||||||
acc <> case codepoint do
|
|
||||||
<<byte>> -> <<byte :: utf8>>
|
|
||||||
char -> char
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
|
||||||
end
|
end
|
39
mix.exs
39
mix.exs
|
@ -3,39 +3,54 @@ defmodule Katso.Mixfile do
|
||||||
|
|
||||||
def project do
|
def project do
|
||||||
[app: :katso,
|
[app: :katso,
|
||||||
version: "0.0.1",
|
version: "0.0.2",
|
||||||
elixir: "~> 1.0",
|
elixir: "~> 1.0",
|
||||||
elixirc_paths: elixirc_paths(Mix.env),
|
elixirc_paths: elixirc_paths(Mix.env),
|
||||||
compilers: [:phoenix] ++ Mix.compilers,
|
compilers: [:phoenix] ++ Mix.compilers,
|
||||||
build_embedded: Mix.env == :prod,
|
build_embedded: Mix.env == :prod,
|
||||||
start_permanent: Mix.env == :prod,
|
start_permanent: Mix.env == :prod,
|
||||||
|
aliases: aliases,
|
||||||
deps: deps]
|
deps: deps]
|
||||||
end
|
end
|
||||||
|
|
||||||
# Configuration for the OTP application
|
# Configuration for the OTP application.
|
||||||
#
|
#
|
||||||
# Type `mix help compile.app` for more information
|
# Type `mix help compile.app` for more information.
|
||||||
def application do
|
def application do
|
||||||
[mod: {Katso, []},
|
[mod: {Katso, []},
|
||||||
applications: [:phoenix, :cowboy, :logger, :ecto, :httpoison]]
|
applications: [:phoenix, :phoenix_html, :cowboy, :logger,
|
||||||
|
:phoenix_ecto, :postgrex, :httpoison, :tzdata]]
|
||||||
end
|
end
|
||||||
|
|
||||||
# Specifies which paths to compile per environment
|
# Specifies which paths to compile per environment.
|
||||||
defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
|
defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
|
||||||
defp elixirc_paths(_), do: ["lib", "web"]
|
defp elixirc_paths(_), do: ["lib", "web"]
|
||||||
|
|
||||||
# Specifies your project dependencies
|
# Specifies your project dependencies.
|
||||||
#
|
#
|
||||||
# Type `mix help deps` for examples and options
|
# Type `mix help deps` for examples and options.
|
||||||
defp deps do
|
defp deps do
|
||||||
[{:phoenix, "~> 0.11"},
|
[{:phoenix, "~> 1.0.3"},
|
||||||
{:phoenix_ecto, "~> 0.3"},
|
{:phoenix_ecto, "~> 1.1"},
|
||||||
{:postgrex, ">= 0.0.0"},
|
{:postgrex, ">= 0.0.0"},
|
||||||
{:phoenix_live_reload, "~> 0.3"},
|
{:phoenix_html, "~> 2.1"},
|
||||||
|
{:phoenix_live_reload, "~> 1.0", only: :dev},
|
||||||
{:cowboy, "~> 1.0"},
|
{:cowboy, "~> 1.0"},
|
||||||
{:excoder, "1.3.0", git: "git@bitbucket.org:Nicd/excoder.git"},
|
{:excoder, "1.3.0", git: "git@bitbucket.org:Nicd/excoder.git"},
|
||||||
{:floki, "~> 0.1"},
|
{:floki, "~> 0.7.0"},
|
||||||
{:httpoison, "~> 0.6"}
|
{:httpoison, "~> 0.8.0"},
|
||||||
|
{:timex, "~> 1.0.0-rc3"}
|
||||||
]
|
]
|
||||||
end
|
end
|
||||||
|
|
||||||
|
# Aliases are shortcut or tasks specific to the current project.
|
||||||
|
# For example, to create, migrate and run the seeds file at once:
|
||||||
|
#
|
||||||
|
# $ mix ecto.setup
|
||||||
|
#
|
||||||
|
# See the documentation for `Mix` for more info on aliases.
|
||||||
|
defp aliases do
|
||||||
|
["ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
|
||||||
|
"ecto.reset": ["ecto.drop", "ecto.setup"]]
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
39
mix.lock
39
mix.lock
|
@ -1,21 +1,26 @@
|
||||||
%{"cowboy": {:hex, :cowboy, "1.0.0"},
|
%{"certifi": {:hex, :certifi, "0.3.0"},
|
||||||
"cowlib": {:hex, :cowlib, "1.0.1"},
|
"combine": {:hex, :combine, "0.5.4"},
|
||||||
|
"cowboy": {:hex, :cowboy, "1.0.4"},
|
||||||
|
"cowlib": {:hex, :cowlib, "1.0.2"},
|
||||||
"decimal": {:hex, :decimal, "1.1.0"},
|
"decimal": {:hex, :decimal, "1.1.0"},
|
||||||
"ecto": {:hex, :ecto, "0.10.2"},
|
"ecto": {:hex, :ecto, "1.0.6"},
|
||||||
"excoder": {:git, "git@bitbucket.org:Nicd/excoder.git", "274736c587c3b48fa4c86b86c94bf915977385d3", []},
|
"excoder": {:git, "git@bitbucket.org:Nicd/excoder.git", "274736c587c3b48fa4c86b86c94bf915977385d3", []},
|
||||||
"floki": {:hex, :floki, "0.1.1"},
|
"floki": {:hex, :floki, "0.7.1"},
|
||||||
"fs": {:hex, :fs, "0.9.1"},
|
"fs": {:hex, :fs, "0.9.2"},
|
||||||
"hackney": {:hex, :hackney, "1.1.0"},
|
"hackney": {:hex, :hackney, "1.4.6"},
|
||||||
"httpoison": {:hex, :httpoison, "0.6.2"},
|
"httpoison": {:hex, :httpoison, "0.8.0"},
|
||||||
"iconv": {:git, "https://github.com/erylee/erlang-iconv.git", "bd9ed8cc16ba3595fc6993dc2e6bf97273ce7f6a", []},
|
|
||||||
"idna": {:hex, :idna, "1.0.2"},
|
"idna": {:hex, :idna, "1.0.2"},
|
||||||
|
"mimerl": {:hex, :mimerl, "1.0.0"},
|
||||||
"mochiweb": {:hex, :mochiweb, "2.12.2"},
|
"mochiweb": {:hex, :mochiweb, "2.12.2"},
|
||||||
"phoenix": {:hex, :phoenix, "0.11.0"},
|
"phoenix": {:hex, :phoenix, "1.0.3"},
|
||||||
"phoenix_ecto": {:hex, :phoenix_ecto, "0.3.1"},
|
"phoenix_ecto": {:hex, :phoenix_ecto, "1.2.0"},
|
||||||
"phoenix_live_reload": {:hex, :phoenix_live_reload, "0.3.1"},
|
"phoenix_html": {:hex, :phoenix_html, "2.2.0"},
|
||||||
"plug": {:hex, :plug, "0.11.3"},
|
"phoenix_live_reload": {:hex, :phoenix_live_reload, "1.0.1"},
|
||||||
"poison": {:hex, :poison, "1.4.0"},
|
"plug": {:hex, :plug, "1.0.3"},
|
||||||
"poolboy": {:hex, :poolboy, "1.4.2"},
|
"poison": {:hex, :poison, "1.5.0"},
|
||||||
"postgrex": {:hex, :postgrex, "0.8.1"},
|
"poolboy": {:hex, :poolboy, "1.5.1"},
|
||||||
"ranch": {:hex, :ranch, "1.0.0"},
|
"postgrex": {:hex, :postgrex, "0.9.1"},
|
||||||
"ssl_verify_hostname": {:hex, :ssl_verify_hostname, "1.0.4"}}
|
"ranch": {:hex, :ranch, "1.2.0"},
|
||||||
|
"ssl_verify_hostname": {:hex, :ssl_verify_hostname, "1.0.5"},
|
||||||
|
"timex": {:hex, :timex, "1.0.0-rc3"},
|
||||||
|
"tzdata": {:hex, :tzdata, "0.5.5"}}
|
||||||
|
|
10
package.json
10
package.json
|
@ -1,13 +1,13 @@
|
||||||
{
|
{
|
||||||
"repository": {
|
"repository": {},
|
||||||
},
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"brunch": "git://github.com/brunch/brunch#5176b6b4bf70cd8cb9dad0058dd3e83e8d983218",
|
"babel-brunch": "^5.1.1",
|
||||||
"babel-brunch": "^4.0.0",
|
"brunch": "^1.8.5",
|
||||||
"clean-css-brunch": ">= 1.0 < 1.8",
|
"clean-css-brunch": ">= 1.0 < 1.8",
|
||||||
"css-brunch": ">= 1.0 < 1.8",
|
"css-brunch": ">= 1.0 < 1.8",
|
||||||
|
"dygraphs": "^1.1.1",
|
||||||
"javascript-brunch": ">= 1.0 < 1.8",
|
"javascript-brunch": ">= 1.0 < 1.8",
|
||||||
"sass-brunch": "git://github.com/brunch/sass-brunch.git#master",
|
"jquery": "^2.1.4",
|
||||||
"uglify-js-brunch": ">= 1.0 < 1.8"
|
"uglify-js-brunch": ">= 1.0 < 1.8"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
13
priv/repo/migrations/20151128094533_add_series_table.exs
Normal file
13
priv/repo/migrations/20151128094533_add_series_table.exs
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
defmodule Katso.Repo.Migrations.AddSeriesTable do
|
||||||
|
use Ecto.Migration
|
||||||
|
|
||||||
|
def change do
|
||||||
|
create table(:series) do
|
||||||
|
timestamps
|
||||||
|
end
|
||||||
|
|
||||||
|
alter table(:fetches) do
|
||||||
|
add :series_id, references(:series)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
|
@ -0,0 +1,9 @@
|
||||||
|
defmodule Katso.Repo.Migrations.AddTitleTotalScore do
|
||||||
|
use Ecto.Migration
|
||||||
|
|
||||||
|
def change do
|
||||||
|
alter table(:titles) do
|
||||||
|
add :total_score, :integer
|
||||||
|
end
|
||||||
|
end
|
||||||
|
end
|
11
priv/repo/seeds.exs
Normal file
11
priv/repo/seeds.exs
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
# Script for populating the database. You can run it as:
|
||||||
|
#
|
||||||
|
# mix run priv/repo/seeds.exs
|
||||||
|
#
|
||||||
|
# Inside the script, you can read and write to any of your
|
||||||
|
# repositories directly:
|
||||||
|
#
|
||||||
|
# Katso.Repo.insert!(%SomeModel{})
|
||||||
|
#
|
||||||
|
# We recommend using the bang functions (`insert!`, `update!`
|
||||||
|
# and so on) as they will fail if something goes wrong.
|
|
@ -3,6 +3,6 @@ defmodule Katso.PageControllerTest do
|
||||||
|
|
||||||
test "GET /" do
|
test "GET /" do
|
||||||
conn = get conn(), "/"
|
conn = get conn(), "/"
|
||||||
assert conn.resp_body =~ "Welcome to Phoenix!"
|
assert html_response(conn, 200) =~ "Welcome to Phoenix!"
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
40
test/support/channel_case.ex
Normal file
40
test/support/channel_case.ex
Normal file
|
@ -0,0 +1,40 @@
|
||||||
|
defmodule Katso.ChannelCase do
|
||||||
|
@moduledoc """
|
||||||
|
This module defines the test case to be used by
|
||||||
|
channel tests.
|
||||||
|
|
||||||
|
Such tests rely on `Phoenix.ChannelTest` and also
|
||||||
|
imports other functionality to make it easier
|
||||||
|
to build and query models.
|
||||||
|
|
||||||
|
Finally, if the test case interacts with the database,
|
||||||
|
it cannot be async. For this reason, every test runs
|
||||||
|
inside a transaction which is reset at the beginning
|
||||||
|
of the test unless the test case is marked as async.
|
||||||
|
"""
|
||||||
|
|
||||||
|
use ExUnit.CaseTemplate
|
||||||
|
|
||||||
|
using do
|
||||||
|
quote do
|
||||||
|
# Import conveniences for testing with channels
|
||||||
|
use Phoenix.ChannelTest
|
||||||
|
|
||||||
|
alias Katso.Repo
|
||||||
|
import Ecto.Model
|
||||||
|
import Ecto.Query, only: [from: 2]
|
||||||
|
|
||||||
|
|
||||||
|
# The default endpoint for testing
|
||||||
|
@endpoint Katso.Endpoint
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
setup tags do
|
||||||
|
unless tags[:async] do
|
||||||
|
Ecto.Adapters.SQL.restart_test_transaction(Katso.Repo, [])
|
||||||
|
end
|
||||||
|
|
||||||
|
:ok
|
||||||
|
end
|
||||||
|
end
|
|
@ -4,7 +4,7 @@ defmodule Katso.ConnCase do
|
||||||
tests that require setting up a connection.
|
tests that require setting up a connection.
|
||||||
|
|
||||||
Such tests rely on `Phoenix.ConnTest` and also
|
Such tests rely on `Phoenix.ConnTest` and also
|
||||||
imports other functionalities to make it easier
|
imports other functionality to make it easier
|
||||||
to build and query models.
|
to build and query models.
|
||||||
|
|
||||||
Finally, if the test case interacts with the database,
|
Finally, if the test case interacts with the database,
|
||||||
|
@ -20,12 +20,10 @@ defmodule Katso.ConnCase do
|
||||||
# Import conveniences for testing with connections
|
# Import conveniences for testing with connections
|
||||||
use Phoenix.ConnTest
|
use Phoenix.ConnTest
|
||||||
|
|
||||||
# Alias the data repository and import query/model functions
|
|
||||||
alias Katso.Repo
|
alias Katso.Repo
|
||||||
import Ecto.Model
|
import Ecto.Model
|
||||||
import Ecto.Query, only: [from: 2]
|
import Ecto.Query, only: [from: 2]
|
||||||
|
|
||||||
# Import URL helpers from the router
|
|
||||||
import Katso.Router.Helpers
|
import Katso.Router.Helpers
|
||||||
|
|
||||||
# The default endpoint for testing
|
# The default endpoint for testing
|
||||||
|
|
59
test/support/model_case.ex
Normal file
59
test/support/model_case.ex
Normal file
|
@ -0,0 +1,59 @@
|
||||||
|
defmodule Katso.ModelCase do
|
||||||
|
@moduledoc """
|
||||||
|
This module defines the test case to be used by
|
||||||
|
model tests.
|
||||||
|
|
||||||
|
You may define functions here to be used as helpers in
|
||||||
|
your model tests. See `errors_on/2`'s definition as reference.
|
||||||
|
|
||||||
|
Finally, if the test case interacts with the database,
|
||||||
|
it cannot be async. For this reason, every test runs
|
||||||
|
inside a transaction which is reset at the beginning
|
||||||
|
of the test unless the test case is marked as async.
|
||||||
|
"""
|
||||||
|
|
||||||
|
use ExUnit.CaseTemplate
|
||||||
|
|
||||||
|
using do
|
||||||
|
quote do
|
||||||
|
alias Katso.Repo
|
||||||
|
import Ecto.Model
|
||||||
|
import Ecto.Query, only: [from: 2]
|
||||||
|
import Katso.ModelCase
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
setup tags do
|
||||||
|
unless tags[:async] do
|
||||||
|
Ecto.Adapters.SQL.restart_test_transaction(Katso.Repo, [])
|
||||||
|
end
|
||||||
|
|
||||||
|
:ok
|
||||||
|
end
|
||||||
|
|
||||||
|
@doc """
|
||||||
|
Helper for returning list of errors in model when passed certain data.
|
||||||
|
|
||||||
|
## Examples
|
||||||
|
|
||||||
|
Given a User model that lists `:name` as a required field and validates
|
||||||
|
`:password` to be safe, it would return:
|
||||||
|
|
||||||
|
iex> errors_on(%User{}, %{password: "password"})
|
||||||
|
[password: "is unsafe", name: "is blank"]
|
||||||
|
|
||||||
|
You could then write your assertion like:
|
||||||
|
|
||||||
|
assert {:password, "is unsafe"} in errors_on(%User{}, %{password: "password"})
|
||||||
|
|
||||||
|
You can also create the changeset manually and retrieve the errors
|
||||||
|
field directly:
|
||||||
|
|
||||||
|
iex> changeset = User.changeset(%User{}, password: "password")
|
||||||
|
iex> {:password, "is unsafe"} in changeset.errors
|
||||||
|
true
|
||||||
|
"""
|
||||||
|
def errors_on(model, data) do
|
||||||
|
model.__struct__.changeset(model, data).errors
|
||||||
|
end
|
||||||
|
end
|
|
@ -1,6 +1,6 @@
|
||||||
ExUnit.start
|
ExUnit.start
|
||||||
|
|
||||||
# Create the database, run migrations, and start the test transaction.
|
|
||||||
Mix.Task.run "ecto.create", ["--quiet"]
|
Mix.Task.run "ecto.create", ["--quiet"]
|
||||||
Mix.Task.run "ecto.migrate", ["--quiet"]
|
Mix.Task.run "ecto.migrate", ["--quiet"]
|
||||||
Ecto.Adapters.SQL.begin_test_transaction(Katso.Repo)
|
Ecto.Adapters.SQL.begin_test_transaction(Katso.Repo)
|
||||||
|
|
||||||
|
|
21
test/views/error_view_test.exs
Normal file
21
test/views/error_view_test.exs
Normal file
|
@ -0,0 +1,21 @@
|
||||||
|
defmodule Katso.ErrorViewTest do
|
||||||
|
use Katso.ConnCase, async: true
|
||||||
|
|
||||||
|
# Bring render/3 and render_to_string/3 for testing custom views
|
||||||
|
import Phoenix.View
|
||||||
|
|
||||||
|
test "renders 404.html" do
|
||||||
|
assert render_to_string(Katso.ErrorView, "404.html", []) ==
|
||||||
|
"Page not found"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "render 500.html" do
|
||||||
|
assert render_to_string(Katso.ErrorView, "500.html", []) ==
|
||||||
|
"Server internal error"
|
||||||
|
end
|
||||||
|
|
||||||
|
test "render any other" do
|
||||||
|
assert render_to_string(Katso.ErrorView, "505.html", []) ==
|
||||||
|
"Server internal error"
|
||||||
|
end
|
||||||
|
end
|
3
test/views/layout_view_test.exs
Normal file
3
test/views/layout_view_test.exs
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
defmodule Katso.LayoutViewTest do
|
||||||
|
use Katso.ConnCase, async: true
|
||||||
|
end
|
3
test/views/page_view_test.exs
Normal file
3
test/views/page_view_test.exs
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
defmodule Katso.PageViewTest do
|
||||||
|
use Katso.ConnCase, async: true
|
||||||
|
end
|
37
web/channels/user_socket.ex
Normal file
37
web/channels/user_socket.ex
Normal file
|
@ -0,0 +1,37 @@
|
||||||
|
defmodule Katso.UserSocket do
|
||||||
|
use Phoenix.Socket
|
||||||
|
|
||||||
|
## Channels
|
||||||
|
# channel "rooms:*", Katso.RoomChannel
|
||||||
|
|
||||||
|
## Transports
|
||||||
|
transport :websocket, Phoenix.Transports.WebSocket
|
||||||
|
# transport :longpoll, Phoenix.Transports.LongPoll
|
||||||
|
|
||||||
|
# Socket params are passed from the client and can
|
||||||
|
# be used to verify and authenticate a user. After
|
||||||
|
# verification, you can put default assigns into
|
||||||
|
# the socket that will be set for all channels, ie
|
||||||
|
#
|
||||||
|
# {:ok, assign(socket, :user_id, verified_user_id)}
|
||||||
|
#
|
||||||
|
# To deny connection, return `:error`.
|
||||||
|
#
|
||||||
|
# See `Phoenix.Token` documentation for examples in
|
||||||
|
# performing token verification on connect.
|
||||||
|
def connect(_params, socket) do
|
||||||
|
{:ok, socket}
|
||||||
|
end
|
||||||
|
|
||||||
|
# Socket id's are topics that allow you to identify all sockets for a given user:
|
||||||
|
#
|
||||||
|
# def id(socket), do: "users_socket:#{socket.assigns.user_id}"
|
||||||
|
#
|
||||||
|
# Would allow you to broadcast a "disconnect" event and terminate
|
||||||
|
# all active sockets and channels for a given user:
|
||||||
|
#
|
||||||
|
# Katso.Endpoint.broadcast("users_socket:" <> user.id, "disconnect", %{})
|
||||||
|
#
|
||||||
|
# Returning `nil` makes this socket anonymous.
|
||||||
|
def id(_socket), do: nil
|
||||||
|
end
|
|
@ -1,9 +1,53 @@
|
||||||
defmodule Katso.PageController do
|
defmodule Katso.PageController do
|
||||||
use Katso.Web, :controller
|
use Katso.Web, :controller
|
||||||
|
use Timex
|
||||||
plug :action
|
alias Katso.Repo
|
||||||
|
alias Katso.Magazine
|
||||||
|
alias Katso.Series
|
||||||
|
alias Katso.Title
|
||||||
|
import Ecto.Query, only: [from: 2, order_by: 3]
|
||||||
|
|
||||||
def index(conn, _params) do
|
def index(conn, _params) do
|
||||||
render conn, "index.html"
|
series_data =
|
||||||
|
from(s in Series,
|
||||||
|
join: f in assoc(s, :fetches),
|
||||||
|
join: m in assoc(f, :magazine),
|
||||||
|
preload: [fetches: {f, magazine: m}],
|
||||||
|
order_by: [asc: s.inserted_at])
|
||||||
|
|
||||||
|
|> Repo.all()
|
||||||
|
|> Enum.map(fn series ->
|
||||||
|
data = Enum.sort(series.fetches, fn first, second -> first.magazine.id < second.magazine.id end)
|
||||||
|
|> Enum.map(fn fetch -> fetch.relative_score end)
|
||||||
|
|
||||||
|
[series.inserted_at | data]
|
||||||
|
end)
|
||||||
|
|> Poison.Encoder.encode([])
|
||||||
|
|
||||||
|
magazines =
|
||||||
|
from(m in Magazine,
|
||||||
|
select: m.name,
|
||||||
|
order_by: [asc: m.id])
|
||||||
|
|> Repo.all
|
||||||
|
|> Poison.Encoder.encode([])
|
||||||
|
|
||||||
|
week_ago = Date.now |> Date.subtract(Time.to_timestamp(1, :weeks)) |> DateConvert.to_erlang_datetime
|
||||||
|
|
||||||
|
top_titles =
|
||||||
|
from(t in Title,
|
||||||
|
join: f in assoc(t, :fetch),
|
||||||
|
join: ts in assoc(t, :title_scores),
|
||||||
|
join: m in assoc(f, :magazine),
|
||||||
|
preload: [fetch: {f, magazine: m}, title_scores: ts],
|
||||||
|
where: f.inserted_at > ^week_ago,
|
||||||
|
order_by: [desc: t.total_score])
|
||||||
|
|> Repo.all
|
||||||
|
|> Enum.slice(0..10)
|
||||||
|
|
||||||
|
conn
|
||||||
|
|> assign(:all_series, series_data)
|
||||||
|
|> assign(:magazines, magazines)
|
||||||
|
|> assign(:top_titles, top_titles)
|
||||||
|
|> render("index.html")
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
|
@ -9,11 +9,12 @@ defmodule Katso.Fetch do
|
||||||
timestamps
|
timestamps
|
||||||
|
|
||||||
belongs_to :magazine, Katso.Magazine
|
belongs_to :magazine, Katso.Magazine
|
||||||
|
belongs_to :series, Katso.Series
|
||||||
has_many :titles, Katso.Title
|
has_many :titles, Katso.Title
|
||||||
has_many :fetch_scores, Katso.FetchScore
|
has_many :fetch_scores, Katso.FetchScore
|
||||||
end
|
end
|
||||||
|
|
||||||
@required_fields ~w(total_score total_titles relative_score magazine_id)
|
@required_fields ~w(total_score total_titles relative_score series_id magazine_id)
|
||||||
@optional_fields ~w()
|
@optional_fields ~w()
|
||||||
|
|
||||||
def changeset(model, params \\ nil) do
|
def changeset(model, params \\ nil) do
|
||||||
|
@ -21,3 +22,11 @@ defmodule Katso.Fetch do
|
||||||
|> cast(params, @required_fields, @optional_fields)
|
|> cast(params, @required_fields, @optional_fields)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
defimpl Poison.Encoder, for: Katso.Fetch do
|
||||||
|
def encode(model, opts) do
|
||||||
|
model
|
||||||
|
|> Map.take([:total_score, :total_titles, :relative_score, :magazine, :inserted_at, :updated_at])
|
||||||
|
|> Poison.Encoder.encode(opts)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
|
@ -16,3 +16,11 @@ defmodule Katso.Magazine do
|
||||||
|> cast(params, @required_fields, @optional_fields)
|
|> cast(params, @required_fields, @optional_fields)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
defimpl Poison.Encoder, for: Katso.Magazine do
|
||||||
|
def encode(model, opts) do
|
||||||
|
model
|
||||||
|
|> Map.take([:name, :key])
|
||||||
|
|> Poison.Encoder.encode(opts)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
24
web/models/series.ex
Normal file
24
web/models/series.ex
Normal file
|
@ -0,0 +1,24 @@
|
||||||
|
defmodule Katso.Series do
|
||||||
|
use Katso.Web, :model
|
||||||
|
|
||||||
|
schema "series" do
|
||||||
|
timestamps
|
||||||
|
has_many :fetches, Katso.Fetch
|
||||||
|
end
|
||||||
|
|
||||||
|
@required_fields ~w()
|
||||||
|
@optional_fields ~w()
|
||||||
|
|
||||||
|
def changeset(model, params \\ nil) do
|
||||||
|
model
|
||||||
|
|> cast(params, @required_fields, @optional_fields)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
defimpl Poison.Encoder, for: Katso.Series do
|
||||||
|
def encode(model, opts) do
|
||||||
|
model
|
||||||
|
|> Map.take([:inserted_at, :updated_at, :fetches])
|
||||||
|
|> Poison.Encoder.encode(opts)
|
||||||
|
end
|
||||||
|
end
|
|
@ -3,12 +3,13 @@ defmodule Katso.Title do
|
||||||
|
|
||||||
schema "titles" do
|
schema "titles" do
|
||||||
field :title, :string
|
field :title, :string
|
||||||
|
field :total_score, :integer
|
||||||
|
|
||||||
belongs_to :fetch, Katso.Fetch
|
belongs_to :fetch, Katso.Fetch
|
||||||
has_many :title_scores, Katso.TitleScore
|
has_many :title_scores, Katso.TitleScore
|
||||||
end
|
end
|
||||||
|
|
||||||
@required_fields ~w(title fetch_id)
|
@required_fields ~w(title fetch_id total_score)
|
||||||
@optional_fields ~w()
|
@optional_fields ~w()
|
||||||
|
|
||||||
def changeset(model, params \\ nil) do
|
def changeset(model, params \\ nil) do
|
||||||
|
|
|
@ -1,11 +1,16 @@
|
||||||
defmodule Katso.Router do
|
defmodule Katso.Router do
|
||||||
use Phoenix.Router
|
use Katso.Web, :router
|
||||||
|
|
||||||
pipeline :browser do
|
pipeline :browser do
|
||||||
plug :accepts, ["html"]
|
plug :accepts, ["html"]
|
||||||
plug :fetch_session
|
plug :fetch_session
|
||||||
plug :fetch_flash
|
plug :fetch_flash
|
||||||
plug :protect_from_forgery
|
plug :protect_from_forgery
|
||||||
|
plug :put_secure_browser_headers
|
||||||
|
end
|
||||||
|
|
||||||
|
pipeline :api do
|
||||||
|
plug :accepts, ["json"]
|
||||||
end
|
end
|
||||||
|
|
||||||
scope "/", Katso do
|
scope "/", Katso do
|
||||||
|
@ -13,4 +18,9 @@ defmodule Katso.Router do
|
||||||
|
|
||||||
get "/", PageController, :index
|
get "/", PageController, :index
|
||||||
end
|
end
|
||||||
|
|
||||||
|
# Other scopes may use custom stacks.
|
||||||
|
# scope "/api", Katso do
|
||||||
|
# pipe_through :api
|
||||||
|
# end
|
||||||
end
|
end
|
||||||
|
|
5
web/static/assets/robots.txt
Normal file
5
web/static/assets/robots.txt
Normal file
|
@ -0,0 +1,5 @@
|
||||||
|
# See http://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
|
||||||
|
#
|
||||||
|
# To ban all spiders from the entire site uncomment the next two lines:
|
||||||
|
# User-agent: *
|
||||||
|
# Disallow: /
|
File diff suppressed because one or more lines are too long
|
@ -1,10 +1,21 @@
|
||||||
import {Socket} from "phoenix"
|
// Brunch automatically concatenates all files in your
|
||||||
|
// watched paths. Those paths can be configured at
|
||||||
|
// config.paths.watched in "brunch-config.js".
|
||||||
|
//
|
||||||
|
// However, those files will only be executed if
|
||||||
|
// explicitly imported. The only exception are files
|
||||||
|
// in vendor, which are never wrapped in imports and
|
||||||
|
// therefore are always executed.
|
||||||
|
|
||||||
// let socket = new Socket("/ws")
|
// Import dependencies
|
||||||
// socket.join("topic:subtopic", {}, chan => {
|
//
|
||||||
// })
|
// If you no longer want to use a dependency, remember
|
||||||
|
// to also remove its path from "config.paths.watched".
|
||||||
|
import "deps/phoenix_html/web/static/js/phoenix_html"
|
||||||
|
|
||||||
let App = {
|
// Import local files
|
||||||
}
|
//
|
||||||
|
// Local files can be imported directly using relative
|
||||||
|
// paths "./socket" or full ones "web/static/js/socket".
|
||||||
|
|
||||||
export default App
|
// import socket from "./socket"
|
||||||
|
|
763
web/static/vendor/phoenix.js
vendored
763
web/static/vendor/phoenix.js
vendored
|
@ -1,763 +0,0 @@
|
||||||
(function(/*! Brunch !*/) {
|
|
||||||
'use strict';
|
|
||||||
|
|
||||||
var globals = typeof window !== 'undefined' ? window : global;
|
|
||||||
if (typeof globals.require === 'function') return;
|
|
||||||
|
|
||||||
var modules = {};
|
|
||||||
var cache = {};
|
|
||||||
|
|
||||||
var has = function(object, name) {
|
|
||||||
return ({}).hasOwnProperty.call(object, name);
|
|
||||||
};
|
|
||||||
|
|
||||||
var expand = function(root, name) {
|
|
||||||
var results = [], parts, part;
|
|
||||||
if (/^\.\.?(\/|$)/.test(name)) {
|
|
||||||
parts = [root, name].join('/').split('/');
|
|
||||||
} else {
|
|
||||||
parts = name.split('/');
|
|
||||||
}
|
|
||||||
for (var i = 0, length = parts.length; i < length; i++) {
|
|
||||||
part = parts[i];
|
|
||||||
if (part === '..') {
|
|
||||||
results.pop();
|
|
||||||
} else if (part !== '.' && part !== '') {
|
|
||||||
results.push(part);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return results.join('/');
|
|
||||||
};
|
|
||||||
|
|
||||||
var dirname = function(path) {
|
|
||||||
return path.split('/').slice(0, -1).join('/');
|
|
||||||
};
|
|
||||||
|
|
||||||
var localRequire = function(path) {
|
|
||||||
return function(name) {
|
|
||||||
var dir = dirname(path);
|
|
||||||
var absolute = expand(dir, name);
|
|
||||||
return globals.require(absolute, path);
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|
||||||
var initModule = function(name, definition) {
|
|
||||||
var module = {id: name, exports: {}};
|
|
||||||
cache[name] = module;
|
|
||||||
definition(module.exports, localRequire(name), module);
|
|
||||||
return module.exports;
|
|
||||||
};
|
|
||||||
|
|
||||||
var require = function(name, loaderPath) {
|
|
||||||
var path = expand(name, '.');
|
|
||||||
if (loaderPath == null) loaderPath = '/';
|
|
||||||
|
|
||||||
if (has(cache, path)) return cache[path].exports;
|
|
||||||
if (has(modules, path)) return initModule(path, modules[path]);
|
|
||||||
|
|
||||||
var dirIndex = expand(path, './index');
|
|
||||||
if (has(cache, dirIndex)) return cache[dirIndex].exports;
|
|
||||||
if (has(modules, dirIndex)) return initModule(dirIndex, modules[dirIndex]);
|
|
||||||
|
|
||||||
throw new Error('Cannot find module "' + name + '" from '+ '"' + loaderPath + '"');
|
|
||||||
};
|
|
||||||
|
|
||||||
var define = function(bundle, fn) {
|
|
||||||
if (typeof bundle === 'object') {
|
|
||||||
for (var key in bundle) {
|
|
||||||
if (has(bundle, key)) {
|
|
||||||
modules[key] = bundle[key];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
modules[bundle] = fn;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
var list = function() {
|
|
||||||
var result = [];
|
|
||||||
for (var item in modules) {
|
|
||||||
if (has(modules, item)) {
|
|
||||||
result.push(item);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
|
|
||||||
globals.require = require;
|
|
||||||
globals.require.define = define;
|
|
||||||
globals.require.register = define;
|
|
||||||
globals.require.list = list;
|
|
||||||
globals.require.brunch = true;
|
|
||||||
})();
|
|
||||||
require.define({'phoenix': function(exports, require, module){ "use strict";
|
|
||||||
|
|
||||||
var _classCallCheck = function (instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } };
|
|
||||||
|
|
||||||
var SOCKET_STATES = { connecting: 0, open: 1, closing: 2, closed: 3 };
|
|
||||||
var CHANNEL_EVENTS = {
|
|
||||||
close: "phx_close",
|
|
||||||
error: "phx_error",
|
|
||||||
join: "phx_join",
|
|
||||||
reply: "phx_reply",
|
|
||||||
leave: "phx_leave"
|
|
||||||
};
|
|
||||||
|
|
||||||
var Push = (function () {
|
|
||||||
|
|
||||||
// Initializes the Push
|
|
||||||
//
|
|
||||||
// chan - The Channel
|
|
||||||
// event - The event, ie `"phx_join"`
|
|
||||||
// payload - The payload, ie `{user_id: 123}`
|
|
||||||
// mergePush - The optional `Push` to merge hooks from
|
|
||||||
|
|
||||||
function Push(chan, event, payload, mergePush) {
|
|
||||||
var _this = this;
|
|
||||||
|
|
||||||
_classCallCheck(this, Push);
|
|
||||||
|
|
||||||
this.chan = chan;
|
|
||||||
this.event = event;
|
|
||||||
this.payload = payload || {};
|
|
||||||
this.receivedResp = null;
|
|
||||||
this.afterHooks = [];
|
|
||||||
this.recHooks = {};
|
|
||||||
this.sent = false;
|
|
||||||
if (mergePush) {
|
|
||||||
mergePush.afterHooks.forEach(function (hook) {
|
|
||||||
return _this.after(hook.ms, hook.callback);
|
|
||||||
});
|
|
||||||
for (var status in mergePush.recHooks) {
|
|
||||||
if (mergePush.recHooks.hasOwnProperty(status)) {
|
|
||||||
this.receive(status, mergePush.recHooks[status]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
Push.prototype.send = function send() {
|
|
||||||
var _this = this;
|
|
||||||
|
|
||||||
var ref = this.chan.socket.makeRef();
|
|
||||||
var refEvent = this.chan.replyEventName(ref);
|
|
||||||
|
|
||||||
this.chan.on(refEvent, function (payload) {
|
|
||||||
_this.receivedResp = payload;
|
|
||||||
_this.matchReceive(payload);
|
|
||||||
_this.chan.off(refEvent);
|
|
||||||
_this.cancelAfters();
|
|
||||||
});
|
|
||||||
|
|
||||||
this.startAfters();
|
|
||||||
this.sent = true;
|
|
||||||
this.chan.socket.push({
|
|
||||||
topic: this.chan.topic,
|
|
||||||
event: this.event,
|
|
||||||
payload: this.payload,
|
|
||||||
ref: ref
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
Push.prototype.receive = function receive(status, callback) {
|
|
||||||
if (this.receivedResp && this.receivedResp.status === status) {
|
|
||||||
callback(this.receivedResp.response);
|
|
||||||
}
|
|
||||||
this.recHooks[status] = callback;
|
|
||||||
return this;
|
|
||||||
};
|
|
||||||
|
|
||||||
Push.prototype.after = function after(ms, callback) {
|
|
||||||
var timer = null;
|
|
||||||
if (this.sent) {
|
|
||||||
timer = setTimeout(callback, ms);
|
|
||||||
}
|
|
||||||
this.afterHooks.push({ ms: ms, callback: callback, timer: timer });
|
|
||||||
return this;
|
|
||||||
};
|
|
||||||
|
|
||||||
// private
|
|
||||||
|
|
||||||
Push.prototype.matchReceive = function matchReceive(_ref) {
|
|
||||||
var status = _ref.status;
|
|
||||||
var response = _ref.response;
|
|
||||||
var ref = _ref.ref;
|
|
||||||
|
|
||||||
var callback = this.recHooks[status];
|
|
||||||
if (!callback) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (this.event === CHANNEL_EVENTS.join) {
|
|
||||||
callback(this.chan);
|
|
||||||
} else {
|
|
||||||
callback(response);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
Push.prototype.cancelAfters = function cancelAfters() {
|
|
||||||
this.afterHooks.forEach(function (hook) {
|
|
||||||
clearTimeout(hook.timer);
|
|
||||||
hook.timer = null;
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
Push.prototype.startAfters = function startAfters() {
|
|
||||||
this.afterHooks.map(function (hook) {
|
|
||||||
if (!hook.timer) {
|
|
||||||
hook.timer = setTimeout(function () {
|
|
||||||
return hook.callback();
|
|
||||||
}, hook.ms);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
return Push;
|
|
||||||
})();
|
|
||||||
|
|
||||||
var Channel = exports.Channel = (function () {
|
|
||||||
function Channel(topic, message, callback, socket) {
|
|
||||||
_classCallCheck(this, Channel);
|
|
||||||
|
|
||||||
this.topic = topic;
|
|
||||||
this.message = message;
|
|
||||||
this.callback = callback;
|
|
||||||
this.socket = socket;
|
|
||||||
this.bindings = [];
|
|
||||||
this.afterHooks = [];
|
|
||||||
this.recHooks = {};
|
|
||||||
this.joinPush = new Push(this, CHANNEL_EVENTS.join, this.message);
|
|
||||||
|
|
||||||
this.reset();
|
|
||||||
}
|
|
||||||
|
|
||||||
Channel.prototype.after = function after(ms, callback) {
|
|
||||||
this.joinPush.after(ms, callback);
|
|
||||||
return this;
|
|
||||||
};
|
|
||||||
|
|
||||||
Channel.prototype.receive = function receive(status, callback) {
|
|
||||||
this.joinPush.receive(status, callback);
|
|
||||||
return this;
|
|
||||||
};
|
|
||||||
|
|
||||||
Channel.prototype.rejoin = function rejoin() {
|
|
||||||
this.reset();
|
|
||||||
this.joinPush.send();
|
|
||||||
};
|
|
||||||
|
|
||||||
Channel.prototype.onClose = function onClose(callback) {
|
|
||||||
this.on(CHANNEL_EVENTS.close, callback);
|
|
||||||
};
|
|
||||||
|
|
||||||
Channel.prototype.onError = function onError(callback) {
|
|
||||||
var _this = this;
|
|
||||||
|
|
||||||
this.on(CHANNEL_EVENTS.error, function (reason) {
|
|
||||||
callback(reason);
|
|
||||||
_this.trigger(CHANNEL_EVENTS.close, "error");
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
Channel.prototype.reset = function reset() {
|
|
||||||
var _this = this;
|
|
||||||
|
|
||||||
this.bindings = [];
|
|
||||||
var newJoinPush = new Push(this, CHANNEL_EVENTS.join, this.message, this.joinPush);
|
|
||||||
this.joinPush = newJoinPush;
|
|
||||||
this.onError(function (reason) {
|
|
||||||
setTimeout(function () {
|
|
||||||
return _this.rejoin();
|
|
||||||
}, _this.socket.reconnectAfterMs);
|
|
||||||
});
|
|
||||||
this.on(CHANNEL_EVENTS.reply, function (payload) {
|
|
||||||
_this.trigger(_this.replyEventName(payload.ref), payload);
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
Channel.prototype.on = function on(event, callback) {
|
|
||||||
this.bindings.push({ event: event, callback: callback });
|
|
||||||
};
|
|
||||||
|
|
||||||
Channel.prototype.isMember = function isMember(topic) {
|
|
||||||
return this.topic === topic;
|
|
||||||
};
|
|
||||||
|
|
||||||
Channel.prototype.off = function off(event) {
|
|
||||||
this.bindings = this.bindings.filter(function (bind) {
|
|
||||||
return bind.event !== event;
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
Channel.prototype.trigger = function trigger(triggerEvent, msg) {
|
|
||||||
this.bindings.filter(function (bind) {
|
|
||||||
return bind.event === triggerEvent;
|
|
||||||
}).map(function (bind) {
|
|
||||||
return bind.callback(msg);
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
Channel.prototype.push = function push(event, payload) {
|
|
||||||
var pushEvent = new Push(this, event, payload);
|
|
||||||
pushEvent.send();
|
|
||||||
|
|
||||||
return pushEvent;
|
|
||||||
};
|
|
||||||
|
|
||||||
Channel.prototype.replyEventName = function replyEventName(ref) {
|
|
||||||
return "chan_reply_" + ref;
|
|
||||||
};
|
|
||||||
|
|
||||||
Channel.prototype.leave = function leave() {
|
|
||||||
var _this = this;
|
|
||||||
|
|
||||||
return this.push(CHANNEL_EVENTS.leave).receive("ok", function () {
|
|
||||||
_this.socket.leave(_this);
|
|
||||||
chan.reset();
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
return Channel;
|
|
||||||
})();
|
|
||||||
|
|
||||||
var Socket = exports.Socket = (function () {
|
|
||||||
|
|
||||||
// Initializes the Socket
|
|
||||||
//
|
|
||||||
// endPoint - The string WebSocket endpoint, ie, "ws://example.com/ws",
|
|
||||||
// "wss://example.com"
|
|
||||||
// "/ws" (inherited host & protocol)
|
|
||||||
// opts - Optional configuration
|
|
||||||
// transport - The Websocket Transport, ie WebSocket, Phoenix.LongPoller.
|
|
||||||
// Defaults to WebSocket with automatic LongPoller fallback.
|
|
||||||
// heartbeatIntervalMs - The millisec interval to send a heartbeat message
|
|
||||||
// reconnectAfterMs - The millisec interval to reconnect after connection loss
|
|
||||||
// logger - The optional function for specialized logging, ie:
|
|
||||||
// `logger: function(msg){ console.log(msg) }`
|
|
||||||
// longpoller_timeout - The maximum timeout of a long poll AJAX request.
|
|
||||||
// Defaults to 20s (double the server long poll timer).
|
|
||||||
//
|
|
||||||
// For IE8 support use an ES5-shim (https://github.com/es-shims/es5-shim)
|
|
||||||
//
|
|
||||||
|
|
||||||
function Socket(endPoint) {
|
|
||||||
var opts = arguments[1] === undefined ? {} : arguments[1];
|
|
||||||
|
|
||||||
_classCallCheck(this, Socket);
|
|
||||||
|
|
||||||
this.states = SOCKET_STATES;
|
|
||||||
this.stateChangeCallbacks = { open: [], close: [], error: [], message: [] };
|
|
||||||
this.flushEveryMs = 50;
|
|
||||||
this.reconnectTimer = null;
|
|
||||||
this.channels = [];
|
|
||||||
this.sendBuffer = [];
|
|
||||||
this.ref = 0;
|
|
||||||
this.transport = opts.transport || window.WebSocket || LongPoller;
|
|
||||||
this.heartbeatIntervalMs = opts.heartbeatIntervalMs || 30000;
|
|
||||||
this.reconnectAfterMs = opts.reconnectAfterMs || 5000;
|
|
||||||
this.logger = opts.logger || function () {}; // noop
|
|
||||||
this.longpoller_timeout = opts.longpoller_timeout || 20000;
|
|
||||||
this.endPoint = this.expandEndpoint(endPoint);
|
|
||||||
|
|
||||||
this.resetBufferTimer();
|
|
||||||
}
|
|
||||||
|
|
||||||
Socket.prototype.protocol = function protocol() {
|
|
||||||
return location.protocol.match(/^https/) ? "wss" : "ws";
|
|
||||||
};
|
|
||||||
|
|
||||||
Socket.prototype.expandEndpoint = function expandEndpoint(endPoint) {
|
|
||||||
if (endPoint.charAt(0) !== "/") {
|
|
||||||
return endPoint;
|
|
||||||
}
|
|
||||||
if (endPoint.charAt(1) === "/") {
|
|
||||||
return "" + this.protocol() + ":" + endPoint;
|
|
||||||
}
|
|
||||||
|
|
||||||
return "" + this.protocol() + "://" + location.host + "" + endPoint;
|
|
||||||
};
|
|
||||||
|
|
||||||
Socket.prototype.disconnect = function disconnect(callback, code, reason) {
|
|
||||||
if (this.conn) {
|
|
||||||
this.conn.onclose = function () {}; // noop
|
|
||||||
if (code) {
|
|
||||||
this.conn.close(code, reason || "");
|
|
||||||
} else {
|
|
||||||
this.conn.close();
|
|
||||||
}
|
|
||||||
this.conn = null;
|
|
||||||
}
|
|
||||||
callback && callback();
|
|
||||||
};
|
|
||||||
|
|
||||||
Socket.prototype.connect = function connect() {
|
|
||||||
var _this = this;
|
|
||||||
|
|
||||||
this.disconnect(function () {
|
|
||||||
_this.conn = new _this.transport(_this.endPoint);
|
|
||||||
_this.conn.timeout = _this.longpoller_timeout;
|
|
||||||
_this.conn.onopen = function () {
|
|
||||||
return _this.onConnOpen();
|
|
||||||
};
|
|
||||||
_this.conn.onerror = function (error) {
|
|
||||||
return _this.onConnError(error);
|
|
||||||
};
|
|
||||||
_this.conn.onmessage = function (event) {
|
|
||||||
return _this.onConnMessage(event);
|
|
||||||
};
|
|
||||||
_this.conn.onclose = function (event) {
|
|
||||||
return _this.onConnClose(event);
|
|
||||||
};
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
Socket.prototype.resetBufferTimer = function resetBufferTimer() {
|
|
||||||
var _this = this;
|
|
||||||
|
|
||||||
clearTimeout(this.sendBufferTimer);
|
|
||||||
this.sendBufferTimer = setTimeout(function () {
|
|
||||||
return _this.flushSendBuffer();
|
|
||||||
}, this.flushEveryMs);
|
|
||||||
};
|
|
||||||
|
|
||||||
// Logs the message. Override `this.logger` for specialized logging. noops by default
|
|
||||||
|
|
||||||
Socket.prototype.log = function log(msg) {
|
|
||||||
this.logger(msg);
|
|
||||||
};
|
|
||||||
|
|
||||||
// Registers callbacks for connection state change events
|
|
||||||
//
|
|
||||||
// Examples
|
|
||||||
//
|
|
||||||
// socket.onError function(error){ alert("An error occurred") }
|
|
||||||
//
|
|
||||||
|
|
||||||
Socket.prototype.onOpen = function onOpen(callback) {
|
|
||||||
this.stateChangeCallbacks.open.push(callback);
|
|
||||||
};
|
|
||||||
|
|
||||||
Socket.prototype.onClose = function onClose(callback) {
|
|
||||||
this.stateChangeCallbacks.close.push(callback);
|
|
||||||
};
|
|
||||||
|
|
||||||
Socket.prototype.onError = function onError(callback) {
|
|
||||||
this.stateChangeCallbacks.error.push(callback);
|
|
||||||
};
|
|
||||||
|
|
||||||
Socket.prototype.onMessage = function onMessage(callback) {
|
|
||||||
this.stateChangeCallbacks.message.push(callback);
|
|
||||||
};
|
|
||||||
|
|
||||||
Socket.prototype.onConnOpen = function onConnOpen() {
|
|
||||||
var _this = this;
|
|
||||||
|
|
||||||
clearInterval(this.reconnectTimer);
|
|
||||||
if (!this.conn.skipHeartbeat) {
|
|
||||||
clearInterval(this.heartbeatTimer);
|
|
||||||
this.heartbeatTimer = setInterval(function () {
|
|
||||||
return _this.sendHeartbeat();
|
|
||||||
}, this.heartbeatIntervalMs);
|
|
||||||
}
|
|
||||||
this.rejoinAll();
|
|
||||||
this.stateChangeCallbacks.open.forEach(function (callback) {
|
|
||||||
return callback();
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
Socket.prototype.onConnClose = function onConnClose(event) {
|
|
||||||
var _this = this;
|
|
||||||
|
|
||||||
this.log("WS close:");
|
|
||||||
this.log(event);
|
|
||||||
clearInterval(this.reconnectTimer);
|
|
||||||
clearInterval(this.heartbeatTimer);
|
|
||||||
this.reconnectTimer = setInterval(function () {
|
|
||||||
return _this.connect();
|
|
||||||
}, this.reconnectAfterMs);
|
|
||||||
this.stateChangeCallbacks.close.forEach(function (callback) {
|
|
||||||
return callback(event);
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
Socket.prototype.onConnError = function onConnError(error) {
|
|
||||||
this.log("WS error:");
|
|
||||||
this.log(error);
|
|
||||||
this.stateChangeCallbacks.error.forEach(function (callback) {
|
|
||||||
return callback(error);
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
Socket.prototype.connectionState = function connectionState() {
|
|
||||||
switch (this.conn && this.conn.readyState) {
|
|
||||||
case this.states.connecting:
|
|
||||||
return "connecting";
|
|
||||||
case this.states.open:
|
|
||||||
return "open";
|
|
||||||
case this.states.closing:
|
|
||||||
return "closing";
|
|
||||||
default:
|
|
||||||
return "closed";
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
Socket.prototype.isConnected = function isConnected() {
|
|
||||||
return this.connectionState() === "open";
|
|
||||||
};
|
|
||||||
|
|
||||||
Socket.prototype.rejoinAll = function rejoinAll() {
|
|
||||||
this.channels.forEach(function (chan) {
|
|
||||||
return chan.rejoin();
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
Socket.prototype.join = function join(topic, message, callback) {
  // Create and register a channel for the topic. If the socket is
  // already connected, join it on the server immediately; otherwise
  // rejoinAll() will pick it up on the next successful connect.
  var channel = new Channel(topic, message, callback, this);
  this.channels.push(channel);
  if (this.isConnected()) {
    channel.rejoin();
  }
  return channel;
};
|
|
||||||
|
|
||||||
Socket.prototype.leave = function leave(chan) {
  // Drop every registered channel belonging to the topic being left.
  var remaining = [];
  for (var i = 0; i < this.channels.length; i++) {
    if (!this.channels[i].isMember(chan.topic)) {
      remaining.push(this.channels[i]);
    }
  }
  this.channels = remaining;
};
|
|
||||||
|
|
||||||
Socket.prototype.push = function push(data) {
  var self = this;

  // Serialize and transmit right away when connected; otherwise queue
  // the send so flushSendBuffer() can replay it after (re)connecting.
  var doSend = function () {
    return self.conn.send(JSON.stringify(data));
  };
  if (!this.isConnected()) {
    this.sendBuffer.push(doSend);
  } else {
    doSend();
  }
};
|
|
||||||
|
|
||||||
// Return the next message ref as a string, wrapping to 0 once
// incrementing no longer changes the value (numeric precision ceiling).
Socket.prototype.makeRef = function makeRef() {
  var next = this.ref + 1;
  this.ref = (next === this.ref) ? 0 : next;
  return this.ref.toString();
};
|
|
||||||
|
|
||||||
Socket.prototype.sendHeartbeat = function sendHeartbeat() {
  // Keep-alive ping on the reserved "phoenix" topic.
  var heartbeat = { topic: "phoenix", event: "heartbeat", payload: {}, ref: this.makeRef() };
  this.push(heartbeat);
};
|
|
||||||
|
|
||||||
Socket.prototype.flushSendBuffer = function flushSendBuffer() {
  // Replay every send queued while disconnected, then clear the queue
  // and re-arm the flush timer.
  if (this.isConnected() && this.sendBuffer.length > 0) {
    for (var i = 0; i < this.sendBuffer.length; i++) {
      this.sendBuffer[i]();
    }
    this.sendBuffer = [];
  }
  this.resetBufferTimer();
};
|
|
||||||
|
|
||||||
Socket.prototype.onConnMessage = function onConnMessage(rawMessage) {
  this.log("message received:");
  this.log(rawMessage);

  // Decode the message envelope, then fan the event out to every channel
  // subscribed to its topic and finally to the generic message hooks.
  var parsed = JSON.parse(rawMessage.data);
  var topic = parsed.topic;
  var event = parsed.event;
  var payload = parsed.payload;

  var members = this.channels.filter(function (chan) {
    return chan.isMember(topic);
  });
  members.forEach(function (chan) {
    chan.trigger(event, payload);
  });
  this.stateChangeCallbacks.message.forEach(function (callback) {
    callback(topic, event, payload);
  });
};
|
|
||||||
|
|
||||||
return Socket;
|
|
||||||
})();
|
|
||||||
|
|
||||||
var LongPoller = exports.LongPoller = (function () {

  // Fallback transport emulating a WebSocket over HTTP long polling.
  // Exposes the surface the Socket expects of a connection object:
  // readyState, onopen/onerror/onmessage/onclose, send() and close().
  function LongPoller(endPoint) {
    _classCallCheck(this, LongPoller);

    this.retryInMs = 5000;
    this.endPoint = null;
    this.token = null;
    this.sig = null;
    this.skipHeartbeat = true; // heartbeats are unnecessary over polling
    this.onopen = function () {}; // noop
    this.onerror = function () {}; // noop
    this.onmessage = function () {}; // noop
    this.onclose = function () {}; // noop
    this.states = SOCKET_STATES;
    this.upgradeEndpoint = this.normalizeEndpoint(endPoint);
    this.pollEndpoint = this.upgradeEndpoint + (/\/$/.test(endPoint) ? "poll" : "/poll");
    this.readyState = this.states.connecting;

    this.poll();
  }

  // Long polling runs over HTTP, so rewrite ws(s):// schemes.
  LongPoller.prototype.normalizeEndpoint = function normalizeEndpoint(endPoint) {
    return endPoint.replace("ws://", "http://").replace("wss://", "https://");
  };

  // Poll URL carrying the server-issued session token and signature.
  LongPoller.prototype.endpointURL = function endpointURL() {
    return this.pollEndpoint + ("?token=" + encodeURIComponent(this.token) + "&sig=" + encodeURIComponent(this.sig));
  };

  LongPoller.prototype.closeAndRetry = function closeAndRetry() {
    this.close();
    this.readyState = this.states.connecting;
  };

  LongPoller.prototype.ontimeout = function ontimeout() {
    this.onerror("timeout");
    this.closeAndRetry();
  };

  // Issue one GET poll. On success re-poll immediately so there is always
  // an outstanding request while the poller is connecting or open.
  LongPoller.prototype.poll = function poll() {
    var _this = this;

    if (!(this.readyState === this.states.open || this.readyState === this.states.connecting)) {
      return;
    }

    Ajax.request("GET", this.endpointURL(), "application/json", null, this.timeout, this.ontimeout.bind(this), function (resp) {
      // Default to status 0 (network failure / empty response).
      var status = 0;
      var messages = [];
      if (resp) {
        status = resp.status;
        messages = resp.messages;
        _this.token = resp.token;
        _this.sig = resp.sig;
      }

      switch (status) {
        case 200: // messages delivered
          messages.forEach(function (msg) {
            return _this.onmessage({ data: JSON.stringify(msg) });
          });
          _this.poll();
          break;
        case 204: // poll timed out server-side with no messages
          _this.poll();
          break;
        case 410: // session established; the poller is now "open"
          _this.readyState = _this.states.open;
          _this.onopen();
          _this.poll();
          break;
        case 0:
        case 500:
          _this.onerror();
          _this.closeAndRetry();
          break;
        default:
          throw "unhandled poll status " + status;
      }
    });
  };

  // POST a message body to the server. Any non-200 response is reported
  // as an error and triggers a reconnect cycle.
  LongPoller.prototype.send = function send(body) {
    var _this = this;

    Ajax.request("POST", this.endpointURL(), "application/json", body, this.timeout, this.onerror.bind(this, "timeout"), function (resp) {
      if (!resp || resp.status !== 200) {
        // BUG FIX: previously this passed the undeclared identifier
        // `status` (which resolves to window.status) to onerror; report
        // the actual response status instead.
        _this.onerror(resp && resp.status);
        _this.closeAndRetry();
      }
    });
  };

  LongPoller.prototype.close = function close(code, reason) {
    this.readyState = this.states.closed;
    this.onclose();
  };

  return LongPoller;
})();
|
|
||||||
|
|
||||||
var Ajax = exports.Ajax = (function () {
  // Static helper wrapping the browser's HTTP request primitives.
  function Ajax() {
    _classCallCheck(this, Ajax);
  }

  Ajax.request = function request(method, endPoint, accept, body, timeout, ontimeout, callback) {
    // IE8/IE9 cross-domain requests must go through XDomainRequest.
    if (window.XDomainRequest) {
      this.xdomainRequest(new XDomainRequest(), method, endPoint, body, timeout, ontimeout, callback);
      return;
    }
    // IE7+, Firefox, Chrome, Opera, Safari expose XMLHttpRequest;
    // IE5/IE6 need the ActiveX fallback.
    var req;
    if (window.XMLHttpRequest) {
      req = new XMLHttpRequest();
    } else {
      req = new ActiveXObject("Microsoft.XMLHTTP");
    }
    this.xhrRequest(req, method, endPoint, accept, body, timeout, ontimeout, callback);
  };

  Ajax.xdomainRequest = function xdomainRequest(req, method, endPoint, body, timeout, ontimeout, callback) {
    var self = this;

    req.timeout = timeout;
    req.open(method, endPoint);
    req.onload = function () {
      // Parse and deliver the response body (null for empty bodies).
      var parsed = self.parseJSON(req.responseText);
      if (callback) {
        callback(parsed);
      }
    };
    if (ontimeout) {
      req.ontimeout = ontimeout;
    }

    // Work around bug in IE9 that requires an attached onprogress handler
    req.onprogress = function () {};

    req.send(body);
  };

  Ajax.xhrRequest = function xhrRequest(req, method, endPoint, accept, body, timeout, ontimeout, callback) {
    var self = this;

    req.timeout = timeout;
    req.open(method, endPoint, true);
    req.setRequestHeader("Content-Type", accept);
    req.onerror = function () {
      if (callback) {
        callback(null);
      }
    };
    req.onreadystatechange = function () {
      // Only deliver once the request has fully completed.
      if (req.readyState === self.states.complete && callback) {
        callback(self.parseJSON(req.responseText));
      }
    };
    if (ontimeout) {
      req.ontimeout = ontimeout;
    }

    req.send(body);
  };

  // JSON.parse wrapper mapping empty or missing bodies to null.
  Ajax.parseJSON = function parseJSON(resp) {
    if (!resp || resp === "") {
      return null;
    }
    return JSON.parse(resp);
  };

  return Ajax;
})();
|
|
||||||
|
|
||||||
Ajax.states = { complete: 4 };
|
|
||||||
exports.__esModule = true;
|
|
||||||
}});
|
|
||||||
if(typeof(window) === 'object' && !window.Phoenix){ window.Phoenix = require('phoenix') };
|
|
25
web/templates/layout/app.html.eex
Normal file
25
web/templates/layout/app.html.eex
Normal file
|
@ -0,0 +1,25 @@
|
||||||
|
<%# Application layout template: wraps every rendered page.
    jQuery, the vendor bundle and Dygraphs are loaded in <head> because
    inline scripts in the views use them; the app bundle loads at the
    end of <body>. EEx comments like this one are not sent to clients. %>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="description" content="">
<meta name="author" content="">

<title>Oho! Katso ällistyttävät clickbait-tilastot!</title>
<link rel="stylesheet" href="<%= static_path(@conn, "/css/app.css") %>">
<script type="text/javascript" src="<%= static_path(@conn, "/js/jquery.js") %>"></script>
<script type="text/javascript" src="<%= static_path(@conn, "/js/vendor.js") %>"></script>
<script type="text/javascript" src="<%= static_path(@conn, "/js/dygraph.js") %>"></script>
</head>

<body>
<div class="container" role="main">

<%# @inner is the rendered template of the current view %>
<%= @inner %>

</div> <!-- /container -->
<script type="text/javascript" src="<%= static_path(@conn, "/js/app.js") %>"></script>
</body>
</html>
|
|
@ -1,35 +0,0 @@
|
||||||
<!DOCTYPE html>
|
|
||||||
<html lang="en">
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8">
|
|
||||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
|
||||||
<meta name="description" content="">
|
|
||||||
<meta name="author" content="">
|
|
||||||
|
|
||||||
<title>Oho! Katso kuvat ja tilastot! Arvaatko, mikä lehti on surkein?</title>
|
|
||||||
<link rel="stylesheet" href="<%= static_path(@conn, "/css/app.css") %>">
|
|
||||||
</head>
|
|
||||||
|
|
||||||
<body>
|
|
||||||
<div class="container">
|
|
||||||
<div class="header">
|
|
||||||
<h1>
|
|
||||||
Oho! Katso kuvat!
|
|
||||||
</h1>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<p class="alert alert-info"><%= get_flash(@conn, :info) %></p>
|
|
||||||
<p class="alert alert-danger"><%= get_flash(@conn, :error) %></p>
|
|
||||||
|
|
||||||
<%= @inner %>
|
|
||||||
|
|
||||||
<div class="footer">
|
|
||||||
<p>© Mikko Ahlroth 2015</p>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
</div> <!-- /container -->
|
|
||||||
<script src="<%= static_path(@conn, "/js/app.js") %>"></script>
|
|
||||||
<script>require("web/static/js/app")</script>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
|
@ -1,10 +1,98 @@
|
||||||
<div class="jumbotron">
|
<div class="jumbotron">
|
||||||
<h2>Onko tämä Internetin paras sivusto?</h2>
|
<h1>
|
||||||
<p class="lead">Nyt se on tutkittu! Tämä outo sivusto selvittää, mikä nykyjournalismissa on vikana. Vai onko?</p>
|
Huhhuh! Et usko, kuinka pitkälle nämä lehdet menivät saadakseen klikkauksia!
|
||||||
|
</h1>
|
||||||
|
<p>
|
||||||
|
Katso järkyttävät tilastot alta.
|
||||||
|
</p>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="row">
|
<h2>
|
||||||
<div class="col-xs-12">
|
Mitä tämä on?
|
||||||
<h1>Tämän hetken lööpeimmät</h1>
|
</h2>
|
||||||
</div>
|
|
||||||
</div>
|
<p>
|
||||||
|
Verkkojournalismiin on nykyisin syöpynyt huolestuttava ilmiö, <em>clickbait</em>. Clickbait tarkoittaa lukijoiden
|
||||||
|
houkuttelemista otsikoilla, jotka dramatisoivat ja revittelevät, vaikka itse uutinen olisikin täysin tavanomainen.
|
||||||
|
Samalla otsikot välttävät paljastamasta, mistä asiassa on oikeasti kyse, jotta lukijan olisi pakko avata artikkeli
|
||||||
|
selvittääkseen.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<p>
|
||||||
|
Tämä sivusto analysoi joukon suomalaisia verkkolehtiä käyttäen huipputieteellistä algoritmia. Se etsii valtakunnan
|
||||||
|
pahimmat clickbait-otsikot ja surkeimmat lehdet, jotta sinä voit tehdä järkeviä päätöksiä siitä, minkä lukemiseen
|
||||||
|
käytät aikaasi.
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<hr />
|
||||||
|
|
||||||
|
<h2>
|
||||||
|
Lehtien clickbait-pisteiden historia
|
||||||
|
</h2>
|
||||||
|
|
||||||
|
<div id="all_series" style="width:100%; height:400px;"></div>
|
||||||
|
|
||||||
|
<hr />
|
||||||
|
|
||||||
|
<h2>
|
||||||
|
Tämän hetken 💩:t otsikot
|
||||||
|
</h2>
|
||||||
|
|
||||||
|
<table class="table" id="top_title_table">
|
||||||
|
<thead>
|
||||||
|
<tr>
|
||||||
|
<!--<th>№</th>-->
|
||||||
|
<th>Pst</th>
|
||||||
|
<th>Lehti</th>
|
||||||
|
<th>Otsikko</th>
|
||||||
|
<th>Syyt</th>
|
||||||
|
<th>Haettu</th>
|
||||||
|
</tr>
|
||||||
|
</thead>
|
||||||
|
<tfoot>
|
||||||
|
<tr>
|
||||||
|
<!--<th>№</th>-->
|
||||||
|
<th>Pst</th>
|
||||||
|
<th>Lehti</th>
|
||||||
|
<th>Otsikko</th>
|
||||||
|
<th>Syyt</th>
|
||||||
|
<th>Haettu</th>
|
||||||
|
</tr>
|
||||||
|
</tfoot>
|
||||||
|
<tbody>
|
||||||
|
<%= for {title, i} <- Enum.with_index @top_titles do %>
|
||||||
|
<tr>
|
||||||
|
<!--<td><%= i + 1 %></td>-->
|
||||||
|
<td><%= title.total_score %></td>
|
||||||
|
<td><%= title.fetch.magazine.name %></td>
|
||||||
|
<td><%= title.title %></td>
|
||||||
|
<td>
|
||||||
|
<%= for title_score <- title.title_scores do %>
|
||||||
|
<%= title_score.score_type %>: <%= title_score.score_amount %><br />
|
||||||
|
<% end %>
|
||||||
|
</td>
|
||||||
|
<td><%= title.fetch.inserted_at %></td>
|
||||||
|
</tr>
|
||||||
|
<% end %>
|
||||||
|
</tbody>
|
||||||
|
</table>
|
||||||
|
|
||||||
|
<script type="text/javascript">
|
||||||
|
var $ = require('jquery');
|
||||||
|
var Dygraph = require('dygraphs');
|
||||||
|
|
||||||
|
var elem = document.getElementById('all_series');
|
||||||
|
|
||||||
|
var data = <%= raw @all_series %>;
|
||||||
|
data.map(function (elem) {
|
||||||
|
elem[0] = new Date(elem[0]);
|
||||||
|
return elem;
|
||||||
|
});
|
||||||
|
|
||||||
|
var labels = <%= raw @magazines %>;
|
||||||
|
labels = ["x"].concat(labels);
|
||||||
|
|
||||||
|
new Dygraph(elem, data, {
|
||||||
|
labels: labels
|
||||||
|
});
|
||||||
|
</script>
|
||||||
|
|
|
@ -2,11 +2,11 @@ defmodule Katso.ErrorView do
|
||||||
use Katso.Web, :view
|
use Katso.Web, :view
|
||||||
|
|
||||||
def render("404.html", _assigns) do
|
def render("404.html", _assigns) do
|
||||||
"Page not found - 404"
|
"Page not found"
|
||||||
end
|
end
|
||||||
|
|
||||||
def render("500.html", _assigns) do
|
def render("500.html", _assigns) do
|
||||||
"Server internal error - 500"
|
"Server internal error"
|
||||||
end
|
end
|
||||||
|
|
||||||
# In case no render clause matches or no
|
# In case no render clause matches or no
|
||||||
|
|
24
web/web.ex
24
web/web.ex
|
@ -19,6 +19,9 @@ defmodule Katso.Web do
|
||||||
def model do
|
def model do
|
||||||
quote do
|
quote do
|
||||||
use Ecto.Model
|
use Ecto.Model
|
||||||
|
|
||||||
|
import Ecto.Changeset
|
||||||
|
import Ecto.Query, only: [from: 1, from: 2]
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -26,12 +29,10 @@ defmodule Katso.Web do
|
||||||
quote do
|
quote do
|
||||||
use Phoenix.Controller
|
use Phoenix.Controller
|
||||||
|
|
||||||
# Alias the data repository and import query/model functions
|
|
||||||
alias Katso.Repo
|
alias Katso.Repo
|
||||||
import Ecto.Model
|
import Ecto.Model
|
||||||
import Ecto.Query, only: [from: 2]
|
import Ecto.Query, only: [from: 1, from: 2]
|
||||||
|
|
||||||
# Import URL helpers from the router
|
|
||||||
import Katso.Router.Helpers
|
import Katso.Router.Helpers
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
@ -41,13 +42,18 @@ defmodule Katso.Web do
|
||||||
use Phoenix.View, root: "web/templates"
|
use Phoenix.View, root: "web/templates"
|
||||||
|
|
||||||
# Import convenience functions from controllers
|
# Import convenience functions from controllers
|
||||||
import Phoenix.Controller, only: [get_flash: 2]
|
import Phoenix.Controller, only: [get_csrf_token: 0, get_flash: 2, view_module: 1]
|
||||||
|
|
||||||
# Import URL helpers from the router
|
|
||||||
import Katso.Router.Helpers
|
|
||||||
|
|
||||||
# Use all HTML functionality (forms, tags, etc)
|
# Use all HTML functionality (forms, tags, etc)
|
||||||
use Phoenix.HTML
|
use Phoenix.HTML
|
||||||
|
|
||||||
|
import Katso.Router.Helpers
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
def router do
|
||||||
|
quote do
|
||||||
|
use Phoenix.Router
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
@ -55,11 +61,9 @@ defmodule Katso.Web do
|
||||||
quote do
|
quote do
|
||||||
use Phoenix.Channel
|
use Phoenix.Channel
|
||||||
|
|
||||||
# Alias the data repository and import query/model functions
|
|
||||||
alias Katso.Repo
|
alias Katso.Repo
|
||||||
import Ecto.Model
|
import Ecto.Model
|
||||||
import Ecto.Query, only: [from: 2]
|
import Ecto.Query, only: [from: 1, from: 2]
|
||||||
|
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
|
Loading…
Reference in a new issue