Häxfest 2015!!!!!

Mikko Ahlroth 2015-11-28 16:27:58 +02:00
parent 8d0806f6f0
commit 559a063c3d
42 changed files with 732 additions and 987 deletions


@ -1,8 +1,19 @@
# Katso
To start your new Phoenix application:
To start your Phoenix app:
1. Install dependencies with `mix deps.get`
2. Start Phoenix endpoint with `mix phoenix.server`
1. Install dependencies with `mix deps.get`
2. Create and migrate your database with `mix ecto.create && mix ecto.migrate`
3. Start Phoenix endpoint with `mix phoenix.server`
Now you can visit `localhost:4000` from your browser.
Now you can visit [`localhost:4000`](http://localhost:4000) from your browser.
Ready to run in production? Please [check our deployment guides](http://www.phoenixframework.org/docs/deployment).
## Learn more
* Official website: http://www.phoenixframework.org/
* Guides: http://phoenixframework.org/docs/overview
* Docs: http://hexdocs.pm/phoenix
* Mailing list: http://groups.google.com/group/phoenix-talk
* Source: https://github.com/phoenixframework/phoenix


@ -2,28 +2,48 @@ exports.config = {
// See http://brunch.io/#documentation for docs.
files: {
javascripts: {
joinTo: 'js/app.js'
// To change the order of concatenation of files, explictly mention here
// https://github.com/brunch/brunch/tree/stable/docs#concatenation
// To use a separate vendor.js bundle, specify two file paths
// https://github.com/brunch/brunch/blob/stable/docs/config.md#files
joinTo: {
"js/app.js": /^(web\/static\/js\/app.js)$/,
"js/jquery.js": /^(node_modules\/jquery\/dist\/jquery\.js)$/,
"js/dygraph.js": /^(node_modules\/dygraphs\/dygraph-combined-dev\.js)$/,
"js/vendor.js": /^(web\/static\/vendor)/
}
//
// To change the order of concatenation of files, explicitly mention here
// https://github.com/brunch/brunch/tree/master/docs#concatenation
// order: {
// before: [
// 'web/static/vendor/js/jquery-2.1.1.js',
// 'web/static/vendor/js/bootstrap.min.js'
// "web/static/vendor/js/jquery-2.1.1.js",
// "web/static/vendor/js/bootstrap.min.js"
// ]
// }
},
stylesheets: {
joinTo: 'css/app.css'
joinTo: "css/app.css"
},
templates: {
joinTo: 'js/app.js'
joinTo: "js/app.js"
}
},
conventions: {
// This option sets where we should place non-css and non-js assets.
// By default, we set this to "/web/static/assets". Files in this directory
// will be copied to `paths.public`, which is "priv/static" by default.
assets: /^(web\/static\/assets)/
},
// Phoenix paths configuration
paths: {
// Which directories to watch
watched: ["web/static", "test/static"],
// Dependencies and current project directories to watch
watched: [
"deps/phoenix/web/static",
"deps/phoenix_html/web/static",
"web/static",
"test/static"
],
// Where to compile files to
public: "priv/static"
@ -31,9 +51,19 @@ exports.config = {
// Configure your plugins
plugins: {
ES6to5: {
babel: {
// Do not use ES6 compiler in vendor code
ignore: [/^(web\/static\/vendor)/]
ignore: [/web\/static\/vendor/]
}
},
modules: {
autoRequire: {
"js/app.js": ["web/static/js/app"]
}
},
npm: {
enabled: true
}
};


@ -8,9 +8,9 @@ use Mix.Config
# Configures the endpoint
config :katso, Katso.Endpoint,
url: [host: "localhost"],
root: Path.expand("..", __DIR__),
secret_key_base: "AwltP5KYMmwNj+7/UP47rHeKMX16cxP7uv0Csr+PoIZEFN2o090mzVvLbcvJeld1",
debug_errors: false,
root: Path.dirname(__DIR__),
secret_key_base: "ikWnS0ccKDnDW56giwdyvAS873ofbegfEAuSbDKCvFzBa9Kz12/lSAPUiCqzC5Ec",
render_errors: [accepts: ~w(html json)],
pubsub: [name: Katso.PubSub,
adapter: Phoenix.PubSub.PG2]
@ -22,3 +22,8 @@ config :logger, :console,
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
# Configure phoenix generators
config :phoenix, :generators,
migration: true,
binary_id: false


@ -11,13 +11,14 @@ config :katso, Katso.Endpoint,
debug_errors: true,
code_reloader: true,
cache_static_lookup: false,
watchers: [node: ["node_modules/brunch/bin/brunch", "watch"]]
check_origin: false,
watchers: [node: ["node_modules/brunch/bin/brunch", "watch", "--stdin"]]
# Watch static and templates for browser reloading.
config :katso, Katso.Endpoint,
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif)$},
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
~r{web/views/.*(ex)$},
~r{web/templates/.*(eex)$}
]
@ -26,10 +27,16 @@ config :katso, Katso.Endpoint,
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace depth during development.
# Do not configure this in production, as keeping
# and calculating stacktraces is usually expensive.
config :phoenix, :stacktrace_depth, 20
# Configure your database
config :katso, Katso.Repo,
adapter: Ecto.Adapters.Postgres,
username: "katso",
password: "katso",
database: "katso",
hostname: "localhost"
username: "postgres",
password: "postgres",
database: "katso_dev",
hostname: "localhost",
pool_size: 10


@ -6,26 +6,42 @@ use Mix.Config
#
# You should also configure the url host to something
# meaningful, we use this information when generating URLs.
#
# Finally, we also include the path to a manifest
# containing the digested version of static files. This
# manifest is generated by the mix phoenix.digest task
# which you typically run after static files are built.
config :katso, Katso.Endpoint,
http: [port: {:system, "PORT"}],
url: [host: "example.com"]
url: [host: "example.com", port: 80],
cache_static_manifest: "priv/static/manifest.json"
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section:
# to the previous section and set your `:url` port to 443:
#
# config:katso, Katso.Endpoint,
# ...
# https: [port: 443,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
# config :katso, Katso.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [port: 443,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables point to a file on
# disk for the key and cert.
# Do not print debug messages in production
config :logger, level: :info
# Where those two env variables return an absolute path to
# the key and cert on disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
# config :katso, Katso.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases
#


@ -4,11 +4,12 @@ use Mix.Config
# you likely want to automate and keep it away from
# your version control system.
config :katso, Katso.Endpoint,
secret_key_base: "Pqf8JsBMaVsywaKVbsJFtsPPySK94cWLrvEgwENe37SBW5EiDO4J3F7AaZ5luBCY"
secret_key_base: "vPhy4euw5ZRCoD6Oh9apXEuvyGSxY8v6mXQOummG970OnVYxFfPmQ50uHAYIqErk"
# Configure your database
config :katso, Katso.Repo,
adapter: Ecto.Adapters.Postgres,
username: "postgres",
password: "postgres",
database: "katso_prod"
database: "katso_prod",
pool_size: 20


@ -15,5 +15,5 @@ config :katso, Katso.Repo,
username: "postgres",
password: "postgres",
database: "katso_test",
size: 1,
max_overflow: false
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox


@ -1,18 +1,25 @@
defmodule Katso.Endpoint do
use Phoenix.Endpoint, otp_app: :katso
# Serve at "/" the given assets from "priv/static" directory
socket "/socket", Katso.UserSocket
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phoenix.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/", from: :katso,
only: ~w(css images js favicon.ico robots.txt)
at: "/", from: :katso, gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
plug Phoenix.LiveReloader
plug Phoenix.CodeReloader
end
plug Plug.RequestId
plug Plug.Logger
plug Plug.Parsers,
@ -26,8 +33,7 @@ defmodule Katso.Endpoint do
plug Plug.Session,
store: :cookie,
key: "_katso_key",
signing_salt: "hybxwdCF",
encryption_salt: "rGum4O3j"
signing_salt: "YAd3yiv6"
plug :router, Katso.Router
plug Katso.Router
end


@ -10,6 +10,7 @@ defmodule Katso.PageAnalyzer do
alias Katso.FetchScore
alias Katso.Title
alias Katso.TitleScore
alias Katso.Series
import Ecto.Query, only: [from: 2]
@ -18,7 +19,8 @@ defmodule Katso.PageAnalyzer do
name: "Iltalehti",
url: "http://www.iltalehti.fi/",
rules: [
".otsikko"
".df-blk",
"p.even a"
]
},
@ -319,6 +321,8 @@ defmodule Katso.PageAnalyzer do
end
def store_data(data) do
series = create_series
Enum.each data, fn {site_key, scores} ->
query = from m in Magazine,
where: m.key == ^(Atom.to_string site_key)
@ -328,7 +332,7 @@ defmodule Katso.PageAnalyzer do
m -> m
end
fetch = create_fetch magazine, scores
fetch = create_fetch series, magazine, scores
Enum.each scores.score_types, fn score_type ->
create_fetch_score fetch, score_type
@ -336,7 +340,11 @@ defmodule Katso.PageAnalyzer do
Enum.reject(scores.matches, fn {_, score_types} -> score_types == [] end)
|> Enum.each fn {match, score_types} ->
title = create_title fetch, {match, score_types}
sum = Enum.reduce score_types, 0, fn {_, _, score_amount}, acc ->
acc + score_amount
end
title = create_title fetch, match, sum
Enum.each score_types, fn score_type ->
create_title_score title, score_type
@ -345,43 +353,73 @@ defmodule Katso.PageAnalyzer do
end
end
defp create_magazine(site, site_key) do
Repo.insert Magazine.changeset %Magazine{}, %{
name: site.name,
key: Atom.to_string(site_key)
}
defp create_series() do
%Series{}
|> Series.changeset(%{})
|> Repo.insert
|> ok_or_die
end
defp create_fetch(magazine, scores) do
Repo.insert Fetch.changeset %Fetch{}, %{
defp create_magazine(site, site_key) do
%Magazine{}
|> Magazine.changeset(%{
name: site.name,
key: Atom.to_string(site_key)
})
|> Repo.insert
|> ok_or_die
end
defp create_fetch(series, magazine, scores) do
%Fetch{}
|> Fetch.changeset(%{
series_id: series.id,
total_score: scores.total_score,
total_titles: scores.total_titles,
relative_score: scores.relative_score,
magazine_id: magazine.id
}
})
|> Repo.insert
|> ok_or_die
end
defp create_title(fetch, {title, _}) do
Repo.insert Title.changeset %Title{}, %{
defp create_title(fetch, title, total_score) do
%Title{}
|> Title.changeset(%{
title: title,
fetch_id: fetch.id
}
fetch_id: fetch.id,
total_score: total_score
})
|> Repo.insert
|> ok_or_die
end
defp create_title_score(title, {score_type, score_words, score_amount}) do
Repo.insert TitleScore.changeset %TitleScore{}, %{
%TitleScore{}
|> TitleScore.changeset(%{
score_type: Atom.to_string(score_type),
score_words: score_words,
score_amount: score_amount,
title_id: title.id
}
})
|> Repo.insert
|> ok_or_die
end
defp create_fetch_score(fetch, {score_type, score_amount}) do
Repo.insert FetchScore.changeset %FetchScore{}, %{
%FetchScore{}
|> FetchScore.changeset(%{
score_type: Atom.to_string(score_type),
score_amount: score_amount,
fetch_id: fetch.id
}
})
|> Repo.insert
|> ok_or_die
end
end
defp ok_or_die({:ok, result}), do: result
defp ok_or_die({:error, err}) do
raise err
end
end
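
For context on the pipeline shape used above: under the Ecto 1.x series this commit upgrades to, Repo.insert/1 returns {:ok, struct} or {:error, changeset} rather than the bare struct, which is why each helper ends in ok_or_die. A minimal hedged sketch of that pattern (not part of the commit's code):

# Ecto 1.x: Repo.insert/1 returns a tagged tuple, so the result has to be
# unwrapped; ok_or_die/1 above does this and raises on failure.
case Katso.Repo.insert(Katso.Series.changeset(%Katso.Series{}, %{})) do
  {:ok, series} -> series
  {:error, changeset} -> raise "insert failed: #{inspect changeset.errors}"
end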


@ -45,4 +45,4 @@ defmodule Katso.Scraper do
Floki.find(parse_tree, rule)
|> Enum.concat results
end
end
end


@ -11,13 +11,13 @@ defmodule Katso.TitleAnalyzer do
vau: %{
s: "Vau! Oho! Ja kaikenlainen muu ihmettely.",
r: ~r/\b(?:vau|oho|ohh?oh?|hups(?:is)?|huh)\b/iu,
p: 1
p: 5
},
nyt: %{
s: "Nyt puhuu X! Nyt se on X!",
r: ~r/\b(?:nyt se on|nyt puhu[uv])\b/iu,
p: 1
p: 3
},
pronominit: %{
@ -28,16 +28,17 @@ defmodule Katso.TitleAnalyzer do
tämänlaiset|tämänlaisia|tämänlaista|
nämä|näissä|näillä|näistä|näihin|näille|näiden|näiltä|näitä|näin|
hän|hänessä|hänellä|hänestä|häneen|hänelle|hänen|häneltä|häntä|
he|heissä|heillä|heistä|heihin|heille|heidän|heiltä|heitä|heistä
he|heissä|heillä|heistä|heihin|heille|heidän|heiltä|heitä|heistä|
ne|niissä|niillä|niistä|niihin|niille|niiden|niiltä|niitä|niistä
)(?:kin)?\b
/ixu,
p: 1
/iux,
p: 3
},
kysymys: %{
s: "Kysymykset otsikoissa. Yleensä näihin vastaus on ”ei”.",
r: ~r/\?/u,
p: 1
r: ~r/\?/,
p: 2
},
huuto: %{
@ -69,38 +70,42 @@ defmodule Katso.TitleAnalyzer do
|vihdoin
|avautu
|tilit(?:y|t)
|hyytävi?ä
|jäätävi?ä
|hyytäv
|jäätäv
|et usko
|kansa\b
|testaa\b
|arvaa
|keksi(?:\b|tkö)
|erikoi(?:s|n)
|nolo(?:\b|a|i)
|sensaatio
|omitui
|(?:päätä)?huim
)
/iux,
p: 1
p: 3
},
katso: %{
s: "Kehotus katsomaan jotain lisäsisältöä, joka lähes poikkeuksetta on hyödytöntä.",
r: ~r/\bkatso\b/u,
p: 1
r: ~r/\bkatso\b/iu,
p: 5
},
some: %{
s: "Sosiaalinen media on turhaa hömpötystä.",
r: ~r/\bsome|twiitt|peukut(?:u|t)/,
r: ~r/\bsome|twii?tt|peukut(?:u|t)|facebook|insta(?:gram)?|pinterest/iu,
p: 1
},
arvaatko: %{
s: "Arvaatko? Uskaltaisitko? Viitsisitkö? jne.",
r: ~r/tko\b|tkö\b/iu,
p: 2
}
}
def analyze(str) do
str = convert_utf8 str
Map.keys(@rules)
|> Enum.map(fn key ->
rule = @rules[key]
@ -119,18 +124,4 @@ defmodule Katso.TitleAnalyzer do
matches -> Enum.map matches, fn match -> Enum.at match, 0 end
end
end
def convert_utf8(str) do
case String.valid? str do
true -> str
false ->
String.codepoints(str)
|> Enum.reduce "", fn codepoint, acc ->
acc <> case codepoint do
<<byte>> -> <<byte :: utf8>>
char -> char
end
end
end
end
end
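
Each rule above pairs a human-readable description (s:), a regular expression (r:) and a point weight (p:). As a rough, hypothetical illustration of applying a single rule to a headline (the actual scoring inside analyze/1 is not fully shown in this hunk):

# Hypothetical single-rule check using the "katso" rule's pattern and weight from above.
title = "Oho! Katso kuvat!"
rule = %{r: ~r/\bkatso\b/iu, p: 5}

matches = Regex.scan(rule.r, title)  # [["Katso"]]
score = length(matches) * rule.p     # 5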


@ -12,4 +12,4 @@ defmodule Katso.Utils do
end
end
end
end
end

mix.exs (39 changed lines)

@ -3,39 +3,54 @@ defmodule Katso.Mixfile do
def project do
[app: :katso,
version: "0.0.1",
version: "0.0.2",
elixir: "~> 1.0",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix] ++ Mix.compilers,
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
aliases: aliases,
deps: deps]
end
# Configuration for the OTP application
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information
# Type `mix help compile.app` for more information.
def application do
[mod: {Katso, []},
applications: [:phoenix, :cowboy, :logger, :ecto, :httpoison]]
applications: [:phoenix, :phoenix_html, :cowboy, :logger,
:phoenix_ecto, :postgrex, :httpoison, :tzdata]]
end
# Specifies which paths to compile per environment
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
defp elixirc_paths(_), do: ["lib", "web"]
# Specifies your project dependencies
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options
# Type `mix help deps` for examples and options.
defp deps do
[{:phoenix, "~> 0.11"},
{:phoenix_ecto, "~> 0.3"},
[{:phoenix, "~> 1.0.3"},
{:phoenix_ecto, "~> 1.1"},
{:postgrex, ">= 0.0.0"},
{:phoenix_live_reload, "~> 0.3"},
{:phoenix_html, "~> 2.1"},
{:phoenix_live_reload, "~> 1.0", only: :dev},
{:cowboy, "~> 1.0"},
{:excoder, "1.3.0", git: "git@bitbucket.org:Nicd/excoder.git"},
{:floki, "~> 0.1"},
{:httpoison, "~> 0.6"}
{:floki, "~> 0.7.0"},
{:httpoison, "~> 0.8.0"},
{:timex, "~> 1.0.0-rc3"}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
# $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
["ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"]]
end
end


@ -1,21 +1,26 @@
%{"cowboy": {:hex, :cowboy, "1.0.0"},
"cowlib": {:hex, :cowlib, "1.0.1"},
%{"certifi": {:hex, :certifi, "0.3.0"},
"combine": {:hex, :combine, "0.5.4"},
"cowboy": {:hex, :cowboy, "1.0.4"},
"cowlib": {:hex, :cowlib, "1.0.2"},
"decimal": {:hex, :decimal, "1.1.0"},
"ecto": {:hex, :ecto, "0.10.2"},
"ecto": {:hex, :ecto, "1.0.6"},
"excoder": {:git, "git@bitbucket.org:Nicd/excoder.git", "274736c587c3b48fa4c86b86c94bf915977385d3", []},
"floki": {:hex, :floki, "0.1.1"},
"fs": {:hex, :fs, "0.9.1"},
"hackney": {:hex, :hackney, "1.1.0"},
"httpoison": {:hex, :httpoison, "0.6.2"},
"iconv": {:git, "https://github.com/erylee/erlang-iconv.git", "bd9ed8cc16ba3595fc6993dc2e6bf97273ce7f6a", []},
"floki": {:hex, :floki, "0.7.1"},
"fs": {:hex, :fs, "0.9.2"},
"hackney": {:hex, :hackney, "1.4.6"},
"httpoison": {:hex, :httpoison, "0.8.0"},
"idna": {:hex, :idna, "1.0.2"},
"mimerl": {:hex, :mimerl, "1.0.0"},
"mochiweb": {:hex, :mochiweb, "2.12.2"},
"phoenix": {:hex, :phoenix, "0.11.0"},
"phoenix_ecto": {:hex, :phoenix_ecto, "0.3.1"},
"phoenix_live_reload": {:hex, :phoenix_live_reload, "0.3.1"},
"plug": {:hex, :plug, "0.11.3"},
"poison": {:hex, :poison, "1.4.0"},
"poolboy": {:hex, :poolboy, "1.4.2"},
"postgrex": {:hex, :postgrex, "0.8.1"},
"ranch": {:hex, :ranch, "1.0.0"},
"ssl_verify_hostname": {:hex, :ssl_verify_hostname, "1.0.4"}}
"phoenix": {:hex, :phoenix, "1.0.3"},
"phoenix_ecto": {:hex, :phoenix_ecto, "1.2.0"},
"phoenix_html": {:hex, :phoenix_html, "2.2.0"},
"phoenix_live_reload": {:hex, :phoenix_live_reload, "1.0.1"},
"plug": {:hex, :plug, "1.0.3"},
"poison": {:hex, :poison, "1.5.0"},
"poolboy": {:hex, :poolboy, "1.5.1"},
"postgrex": {:hex, :postgrex, "0.9.1"},
"ranch": {:hex, :ranch, "1.2.0"},
"ssl_verify_hostname": {:hex, :ssl_verify_hostname, "1.0.5"},
"timex": {:hex, :timex, "1.0.0-rc3"},
"tzdata": {:hex, :tzdata, "0.5.5"}}


@ -1,13 +1,13 @@
{
"repository": {
},
"repository": {},
"dependencies": {
"brunch": "git://github.com/brunch/brunch#5176b6b4bf70cd8cb9dad0058dd3e83e8d983218",
"babel-brunch": "^4.0.0",
"babel-brunch": "^5.1.1",
"brunch": "^1.8.5",
"clean-css-brunch": ">= 1.0 < 1.8",
"css-brunch": ">= 1.0 < 1.8",
"dygraphs": "^1.1.1",
"javascript-brunch": ">= 1.0 < 1.8",
"sass-brunch": "git://github.com/brunch/sass-brunch.git#master",
"jquery": "^2.1.4",
"uglify-js-brunch": ">= 1.0 < 1.8"
}
}


@ -0,0 +1,13 @@
defmodule Katso.Repo.Migrations.AddSeriesTable do
use Ecto.Migration
def change do
create table(:series) do
timestamps
end
alter table(:fetches) do
add :series_id, references(:series)
end
end
end


@ -0,0 +1,9 @@
defmodule Katso.Repo.Migrations.AddTitleTotalScore do
use Ecto.Migration
def change do
alter table(:titles) do
add :total_score, :integer
end
end
end

priv/repo/seeds.exs (new file, 11 lines)

@ -0,0 +1,11 @@
# Script for populating the database. You can run it as:
#
# mix run priv/repo/seeds.exs
#
# Inside the script, you can read and write to any of your
# repositories directly:
#
# Katso.Repo.insert!(%SomeModel{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
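
As a concrete, hypothetical seed for this app using the Magazine schema and the bang variant recommended above:

alias Katso.Repo
alias Katso.Magazine

# insert!/1 raises if the insert fails, which is what you want in a one-off seed script.
Repo.insert!(Magazine.changeset(%Magazine{}, %{name: "Iltalehti", key: "iltalehti"}))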


@ -3,6 +3,6 @@ defmodule Katso.PageControllerTest do
test "GET /" do
conn = get conn(), "/"
assert conn.resp_body =~ "Welcome to Phoenix!"
assert html_response(conn, 200) =~ "Welcome to Phoenix!"
end
end


@ -0,0 +1,40 @@
defmodule Katso.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
imports other functionality to make it easier
to build and query models.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
use Phoenix.ChannelTest
alias Katso.Repo
import Ecto.Model
import Ecto.Query, only: [from: 2]
# The default endpoint for testing
@endpoint Katso.Endpoint
end
end
setup tags do
unless tags[:async] do
Ecto.Adapters.SQL.restart_test_transaction(Katso.Repo, [])
end
:ok
end
end


@ -4,7 +4,7 @@ defmodule Katso.ConnCase do
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
imports other functionalities to make it easier
imports other functionality to make it easier
to build and query models.
Finally, if the test case interacts with the database,
@ -20,12 +20,10 @@ defmodule Katso.ConnCase do
# Import conveniences for testing with connections
use Phoenix.ConnTest
# Alias the data repository and import query/model functions
alias Katso.Repo
import Ecto.Model
import Ecto.Query, only: [from: 2]
# Import URL helpers from the router
import Katso.Router.Helpers
# The default endpoint for testing


@ -0,0 +1,59 @@
defmodule Katso.ModelCase do
@moduledoc """
This module defines the test case to be used by
model tests.
You may define functions here to be used as helpers in
your model tests. See `errors_on/2`'s definition as reference.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
alias Katso.Repo
import Ecto.Model
import Ecto.Query, only: [from: 2]
import Katso.ModelCase
end
end
setup tags do
unless tags[:async] do
Ecto.Adapters.SQL.restart_test_transaction(Katso.Repo, [])
end
:ok
end
@doc """
Helper for returning list of errors in model when passed certain data.
## Examples
Given a User model that lists `:name` as a required field and validates
`:password` to be safe, it would return:
iex> errors_on(%User{}, %{password: "password"})
[password: "is unsafe", name: "is blank"]
You could then write your assertion like:
assert {:password, "is unsafe"} in errors_on(%User{}, %{password: "password"})
You can also create the changeset manually and retrieve the errors
field directly:
iex> changeset = User.changeset(%User{}, password: "password")
iex> {:password, "is unsafe"} in changeset.errors
true
"""
def errors_on(model, data) do
model.__struct__.changeset(model, data).errors
end
end
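
For orientation, a minimal model test built on this case might look as follows (the test module and assertion are illustrative, not part of this commit):

defmodule Katso.MagazineTest do
  use Katso.ModelCase

  # Missing required fields (name, key) should leave the changeset invalid.
  test "changeset requires name and key" do
    changeset = Katso.Magazine.changeset(%Katso.Magazine{}, %{})
    refute changeset.valid?
  end
end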


@ -1,6 +1,6 @@
ExUnit.start
# Create the database, run migrations, and start the test transaction.
Mix.Task.run "ecto.create", ["--quiet"]
Mix.Task.run "ecto.migrate", ["--quiet"]
Ecto.Adapters.SQL.begin_test_transaction(Katso.Repo)


@ -0,0 +1,21 @@
defmodule Katso.ErrorViewTest do
use Katso.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(Katso.ErrorView, "404.html", []) ==
"Page not found"
end
test "render 500.html" do
assert render_to_string(Katso.ErrorView, "500.html", []) ==
"Server internal error"
end
test "render any other" do
assert render_to_string(Katso.ErrorView, "505.html", []) ==
"Server internal error"
end
end


@ -0,0 +1,3 @@
defmodule Katso.LayoutViewTest do
use Katso.ConnCase, async: true
end


@ -0,0 +1,3 @@
defmodule Katso.PageViewTest do
use Katso.ConnCase, async: true
end


@ -0,0 +1,37 @@
defmodule Katso.UserSocket do
use Phoenix.Socket
## Channels
# channel "rooms:*", Katso.RoomChannel
## Transports
transport :websocket, Phoenix.Transports.WebSocket
# transport :longpoll, Phoenix.Transports.LongPoll
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
def connect(_params, socket) do
{:ok, socket}
end
# Socket ids are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "users_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# Katso.Endpoint.broadcast("users_socket:" <> user.id, "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
end


@ -1,9 +1,53 @@
defmodule Katso.PageController do
use Katso.Web, :controller
plug :action
use Timex
alias Katso.Repo
alias Katso.Magazine
alias Katso.Series
alias Katso.Title
import Ecto.Query, only: [from: 2, order_by: 3]
def index(conn, _params) do
render conn, "index.html"
series_data =
from(s in Series,
join: f in assoc(s, :fetches),
join: m in assoc(f, :magazine),
preload: [fetches: {f, magazine: m}],
order_by: [asc: s.inserted_at])
|> Repo.all()
|> Enum.map(fn series ->
data = Enum.sort(series.fetches, fn first, second -> first.magazine.id < second.magazine.id end)
|> Enum.map(fn fetch -> fetch.relative_score end)
[series.inserted_at | data]
end)
|> Poison.Encoder.encode([])
magazines =
from(m in Magazine,
select: m.name,
order_by: [asc: m.id])
|> Repo.all
|> Poison.Encoder.encode([])
week_ago = Date.now |> Date.subtract(Time.to_timestamp(1, :weeks)) |> DateConvert.to_erlang_datetime
top_titles =
from(t in Title,
join: f in assoc(t, :fetch),
join: ts in assoc(t, :title_scores),
join: m in assoc(f, :magazine),
preload: [fetch: {f, magazine: m}, title_scores: ts],
where: f.inserted_at > ^week_ago,
order_by: [desc: t.total_score])
|> Repo.all
|> Enum.slice(0..10)
conn
|> assign(:all_series, series_data)
|> assign(:magazines, magazines)
|> assign(:top_titles, top_titles)
|> render("index.html")
end
end


@ -9,11 +9,12 @@ defmodule Katso.Fetch do
timestamps
belongs_to :magazine, Katso.Magazine
belongs_to :series, Katso.Series
has_many :titles, Katso.Title
has_many :fetch_scores, Katso.FetchScore
end
@required_fields ~w(total_score total_titles relative_score magazine_id)
@required_fields ~w(total_score total_titles relative_score series_id magazine_id)
@optional_fields ~w()
def changeset(model, params \\ nil) do
@ -21,3 +22,11 @@ defmodule Katso.Fetch do
|> cast(params, @required_fields, @optional_fields)
end
end
defimpl Poison.Encoder, for: Katso.Fetch do
def encode(model, opts) do
model
|> Map.take([:total_score, :total_titles, :relative_score, :magazine, :inserted_at, :updated_at])
|> Poison.Encoder.encode(opts)
end
end


@ -16,3 +16,11 @@ defmodule Katso.Magazine do
|> cast(params, @required_fields, @optional_fields)
end
end
defimpl Poison.Encoder, for: Katso.Magazine do
def encode(model, opts) do
model
|> Map.take([:name, :key])
|> Poison.Encoder.encode(opts)
end
end
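
With this implementation in place, a Magazine struct can be handed straight to Poison when rendering JSON; a small hedged usage sketch (field values illustrative):

magazine = %Katso.Magazine{name: "Iltalehti", key: "iltalehti"}
Poison.encode!(magazine)
# => a JSON object containing only the name and key fields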

web/models/series.ex (new file, 24 lines)

@ -0,0 +1,24 @@
defmodule Katso.Series do
use Katso.Web, :model
schema "series" do
timestamps
has_many :fetches, Katso.Fetch
end
@required_fields ~w()
@optional_fields ~w()
def changeset(model, params \\ nil) do
model
|> cast(params, @required_fields, @optional_fields)
end
end
defimpl Poison.Encoder, for: Katso.Series do
def encode(model, opts) do
model
|> Map.take([:inserted_at, :updated_at, :fetches])
|> Poison.Encoder.encode(opts)
end
end


@ -3,12 +3,13 @@ defmodule Katso.Title do
schema "titles" do
field :title, :string
field :total_score, :integer
belongs_to :fetch, Katso.Fetch
has_many :title_scores, Katso.TitleScore
end
@required_fields ~w(title fetch_id)
@required_fields ~w(title fetch_id total_score)
@optional_fields ~w()
def changeset(model, params \\ nil) do


@ -1,11 +1,16 @@
defmodule Katso.Router do
use Phoenix.Router
use Katso.Web, :router
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :api do
plug :accepts, ["json"]
end
scope "/", Katso do
@ -13,4 +18,9 @@ defmodule Katso.Router do
get "/", PageController, :index
end
# Other scopes may use custom stacks.
# scope "/api", Katso do
# pipe_through :api
# end
end


@ -0,0 +1,5 @@
# See http://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
#
# To ban all spiders from the entire site uncomment the next two lines:
# User-agent: *
# Disallow: /

File diff suppressed because one or more lines are too long


@ -1,10 +1,21 @@
import {Socket} from "phoenix"
// Brunch automatically concatenates all files in your
// watched paths. Those paths can be configured at
// config.paths.watched in "brunch-config.js".
//
// However, those files will only be executed if
// explicitly imported. The only exception are files
// in vendor, which are never wrapped in imports and
// therefore are always executed.
// let socket = new Socket("/ws")
// socket.join("topic:subtopic", {}, chan => {
// })
// Import dependencies
//
// If you no longer want to use a dependency, remember
// to also remove its path from "config.paths.watched".
import "deps/phoenix_html/web/static/js/phoenix_html"
let App = {
}
// Import local files
//
// Local files can be imported directly using relative
// paths "./socket" or full ones "web/static/js/socket".
export default App
// import socket from "./socket"


@ -1,763 +0,0 @@
(function(/*! Brunch !*/) {
'use strict';
var globals = typeof window !== 'undefined' ? window : global;
if (typeof globals.require === 'function') return;
var modules = {};
var cache = {};
var has = function(object, name) {
return ({}).hasOwnProperty.call(object, name);
};
var expand = function(root, name) {
var results = [], parts, part;
if (/^\.\.?(\/|$)/.test(name)) {
parts = [root, name].join('/').split('/');
} else {
parts = name.split('/');
}
for (var i = 0, length = parts.length; i < length; i++) {
part = parts[i];
if (part === '..') {
results.pop();
} else if (part !== '.' && part !== '') {
results.push(part);
}
}
return results.join('/');
};
var dirname = function(path) {
return path.split('/').slice(0, -1).join('/');
};
var localRequire = function(path) {
return function(name) {
var dir = dirname(path);
var absolute = expand(dir, name);
return globals.require(absolute, path);
};
};
var initModule = function(name, definition) {
var module = {id: name, exports: {}};
cache[name] = module;
definition(module.exports, localRequire(name), module);
return module.exports;
};
var require = function(name, loaderPath) {
var path = expand(name, '.');
if (loaderPath == null) loaderPath = '/';
if (has(cache, path)) return cache[path].exports;
if (has(modules, path)) return initModule(path, modules[path]);
var dirIndex = expand(path, './index');
if (has(cache, dirIndex)) return cache[dirIndex].exports;
if (has(modules, dirIndex)) return initModule(dirIndex, modules[dirIndex]);
throw new Error('Cannot find module "' + name + '" from '+ '"' + loaderPath + '"');
};
var define = function(bundle, fn) {
if (typeof bundle === 'object') {
for (var key in bundle) {
if (has(bundle, key)) {
modules[key] = bundle[key];
}
}
} else {
modules[bundle] = fn;
}
};
var list = function() {
var result = [];
for (var item in modules) {
if (has(modules, item)) {
result.push(item);
}
}
return result;
};
globals.require = require;
globals.require.define = define;
globals.require.register = define;
globals.require.list = list;
globals.require.brunch = true;
})();
require.define({'phoenix': function(exports, require, module){ "use strict";
var _classCallCheck = function (instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } };
var SOCKET_STATES = { connecting: 0, open: 1, closing: 2, closed: 3 };
var CHANNEL_EVENTS = {
close: "phx_close",
error: "phx_error",
join: "phx_join",
reply: "phx_reply",
leave: "phx_leave"
};
var Push = (function () {
// Initializes the Push
//
// chan - The Channel
// event - The event, ie `"phx_join"`
// payload - The payload, ie `{user_id: 123}`
// mergePush - The optional `Push` to merge hooks from
function Push(chan, event, payload, mergePush) {
var _this = this;
_classCallCheck(this, Push);
this.chan = chan;
this.event = event;
this.payload = payload || {};
this.receivedResp = null;
this.afterHooks = [];
this.recHooks = {};
this.sent = false;
if (mergePush) {
mergePush.afterHooks.forEach(function (hook) {
return _this.after(hook.ms, hook.callback);
});
for (var status in mergePush.recHooks) {
if (mergePush.recHooks.hasOwnProperty(status)) {
this.receive(status, mergePush.recHooks[status]);
}
}
}
}
Push.prototype.send = function send() {
var _this = this;
var ref = this.chan.socket.makeRef();
var refEvent = this.chan.replyEventName(ref);
this.chan.on(refEvent, function (payload) {
_this.receivedResp = payload;
_this.matchReceive(payload);
_this.chan.off(refEvent);
_this.cancelAfters();
});
this.startAfters();
this.sent = true;
this.chan.socket.push({
topic: this.chan.topic,
event: this.event,
payload: this.payload,
ref: ref
});
};
Push.prototype.receive = function receive(status, callback) {
if (this.receivedResp && this.receivedResp.status === status) {
callback(this.receivedResp.response);
}
this.recHooks[status] = callback;
return this;
};
Push.prototype.after = function after(ms, callback) {
var timer = null;
if (this.sent) {
timer = setTimeout(callback, ms);
}
this.afterHooks.push({ ms: ms, callback: callback, timer: timer });
return this;
};
// private
Push.prototype.matchReceive = function matchReceive(_ref) {
var status = _ref.status;
var response = _ref.response;
var ref = _ref.ref;
var callback = this.recHooks[status];
if (!callback) {
return;
}
if (this.event === CHANNEL_EVENTS.join) {
callback(this.chan);
} else {
callback(response);
}
};
Push.prototype.cancelAfters = function cancelAfters() {
this.afterHooks.forEach(function (hook) {
clearTimeout(hook.timer);
hook.timer = null;
});
};
Push.prototype.startAfters = function startAfters() {
this.afterHooks.map(function (hook) {
if (!hook.timer) {
hook.timer = setTimeout(function () {
return hook.callback();
}, hook.ms);
}
});
};
return Push;
})();
var Channel = exports.Channel = (function () {
function Channel(topic, message, callback, socket) {
_classCallCheck(this, Channel);
this.topic = topic;
this.message = message;
this.callback = callback;
this.socket = socket;
this.bindings = [];
this.afterHooks = [];
this.recHooks = {};
this.joinPush = new Push(this, CHANNEL_EVENTS.join, this.message);
this.reset();
}
Channel.prototype.after = function after(ms, callback) {
this.joinPush.after(ms, callback);
return this;
};
Channel.prototype.receive = function receive(status, callback) {
this.joinPush.receive(status, callback);
return this;
};
Channel.prototype.rejoin = function rejoin() {
this.reset();
this.joinPush.send();
};
Channel.prototype.onClose = function onClose(callback) {
this.on(CHANNEL_EVENTS.close, callback);
};
Channel.prototype.onError = function onError(callback) {
var _this = this;
this.on(CHANNEL_EVENTS.error, function (reason) {
callback(reason);
_this.trigger(CHANNEL_EVENTS.close, "error");
});
};
Channel.prototype.reset = function reset() {
var _this = this;
this.bindings = [];
var newJoinPush = new Push(this, CHANNEL_EVENTS.join, this.message, this.joinPush);
this.joinPush = newJoinPush;
this.onError(function (reason) {
setTimeout(function () {
return _this.rejoin();
}, _this.socket.reconnectAfterMs);
});
this.on(CHANNEL_EVENTS.reply, function (payload) {
_this.trigger(_this.replyEventName(payload.ref), payload);
});
};
Channel.prototype.on = function on(event, callback) {
this.bindings.push({ event: event, callback: callback });
};
Channel.prototype.isMember = function isMember(topic) {
return this.topic === topic;
};
Channel.prototype.off = function off(event) {
this.bindings = this.bindings.filter(function (bind) {
return bind.event !== event;
});
};
Channel.prototype.trigger = function trigger(triggerEvent, msg) {
this.bindings.filter(function (bind) {
return bind.event === triggerEvent;
}).map(function (bind) {
return bind.callback(msg);
});
};
Channel.prototype.push = function push(event, payload) {
var pushEvent = new Push(this, event, payload);
pushEvent.send();
return pushEvent;
};
Channel.prototype.replyEventName = function replyEventName(ref) {
return "chan_reply_" + ref;
};
Channel.prototype.leave = function leave() {
var _this = this;
return this.push(CHANNEL_EVENTS.leave).receive("ok", function () {
_this.socket.leave(_this);
chan.reset();
});
};
return Channel;
})();
var Socket = exports.Socket = (function () {
// Initializes the Socket
//
// endPoint - The string WebSocket endpoint, ie, "ws://example.com/ws",
// "wss://example.com"
// "/ws" (inherited host & protocol)
// opts - Optional configuration
// transport - The Websocket Transport, ie WebSocket, Phoenix.LongPoller.
// Defaults to WebSocket with automatic LongPoller fallback.
// heartbeatIntervalMs - The millisec interval to send a heartbeat message
// reconnectAfterMs - The millisec interval to reconnect after connection loss
// logger - The optional function for specialized logging, ie:
// `logger: function(msg){ console.log(msg) }`
// longpoller_timeout - The maximum timeout of a long poll AJAX request.
// Defaults to 20s (double the server long poll timer).
//
// For IE8 support use an ES5-shim (https://github.com/es-shims/es5-shim)
//
function Socket(endPoint) {
var opts = arguments[1] === undefined ? {} : arguments[1];
_classCallCheck(this, Socket);
this.states = SOCKET_STATES;
this.stateChangeCallbacks = { open: [], close: [], error: [], message: [] };
this.flushEveryMs = 50;
this.reconnectTimer = null;
this.channels = [];
this.sendBuffer = [];
this.ref = 0;
this.transport = opts.transport || window.WebSocket || LongPoller;
this.heartbeatIntervalMs = opts.heartbeatIntervalMs || 30000;
this.reconnectAfterMs = opts.reconnectAfterMs || 5000;
this.logger = opts.logger || function () {}; // noop
this.longpoller_timeout = opts.longpoller_timeout || 20000;
this.endPoint = this.expandEndpoint(endPoint);
this.resetBufferTimer();
}
Socket.prototype.protocol = function protocol() {
return location.protocol.match(/^https/) ? "wss" : "ws";
};
Socket.prototype.expandEndpoint = function expandEndpoint(endPoint) {
if (endPoint.charAt(0) !== "/") {
return endPoint;
}
if (endPoint.charAt(1) === "/") {
return "" + this.protocol() + ":" + endPoint;
}
return "" + this.protocol() + "://" + location.host + "" + endPoint;
};
Socket.prototype.disconnect = function disconnect(callback, code, reason) {
if (this.conn) {
this.conn.onclose = function () {}; // noop
if (code) {
this.conn.close(code, reason || "");
} else {
this.conn.close();
}
this.conn = null;
}
callback && callback();
};
Socket.prototype.connect = function connect() {
var _this = this;
this.disconnect(function () {
_this.conn = new _this.transport(_this.endPoint);
_this.conn.timeout = _this.longpoller_timeout;
_this.conn.onopen = function () {
return _this.onConnOpen();
};
_this.conn.onerror = function (error) {
return _this.onConnError(error);
};
_this.conn.onmessage = function (event) {
return _this.onConnMessage(event);
};
_this.conn.onclose = function (event) {
return _this.onConnClose(event);
};
});
};
Socket.prototype.resetBufferTimer = function resetBufferTimer() {
var _this = this;
clearTimeout(this.sendBufferTimer);
this.sendBufferTimer = setTimeout(function () {
return _this.flushSendBuffer();
}, this.flushEveryMs);
};
// Logs the message. Override `this.logger` for specialized logging. noops by default
Socket.prototype.log = function log(msg) {
this.logger(msg);
};
// Registers callbacks for connection state change events
//
// Examples
//
// socket.onError function(error){ alert("An error occurred") }
//
Socket.prototype.onOpen = function onOpen(callback) {
this.stateChangeCallbacks.open.push(callback);
};
Socket.prototype.onClose = function onClose(callback) {
this.stateChangeCallbacks.close.push(callback);
};
Socket.prototype.onError = function onError(callback) {
this.stateChangeCallbacks.error.push(callback);
};
Socket.prototype.onMessage = function onMessage(callback) {
this.stateChangeCallbacks.message.push(callback);
};
Socket.prototype.onConnOpen = function onConnOpen() {
var _this = this;
clearInterval(this.reconnectTimer);
if (!this.conn.skipHeartbeat) {
clearInterval(this.heartbeatTimer);
this.heartbeatTimer = setInterval(function () {
return _this.sendHeartbeat();
}, this.heartbeatIntervalMs);
}
this.rejoinAll();
this.stateChangeCallbacks.open.forEach(function (callback) {
return callback();
});
};
Socket.prototype.onConnClose = function onConnClose(event) {
var _this = this;
this.log("WS close:");
this.log(event);
clearInterval(this.reconnectTimer);
clearInterval(this.heartbeatTimer);
this.reconnectTimer = setInterval(function () {
return _this.connect();
}, this.reconnectAfterMs);
this.stateChangeCallbacks.close.forEach(function (callback) {
return callback(event);
});
};
Socket.prototype.onConnError = function onConnError(error) {
this.log("WS error:");
this.log(error);
this.stateChangeCallbacks.error.forEach(function (callback) {
return callback(error);
});
};
Socket.prototype.connectionState = function connectionState() {
switch (this.conn && this.conn.readyState) {
case this.states.connecting:
return "connecting";
case this.states.open:
return "open";
case this.states.closing:
return "closing";
default:
return "closed";
}
};
Socket.prototype.isConnected = function isConnected() {
return this.connectionState() === "open";
};
Socket.prototype.rejoinAll = function rejoinAll() {
this.channels.forEach(function (chan) {
return chan.rejoin();
});
};
Socket.prototype.join = function join(topic, message, callback) {
var chan = new Channel(topic, message, callback, this);
this.channels.push(chan);
if (this.isConnected()) {
chan.rejoin();
}
return chan;
};
Socket.prototype.leave = function leave(chan) {
this.channels = this.channels.filter(function (c) {
return !c.isMember(chan.topic);
});
};
Socket.prototype.push = function push(data) {
var _this = this;
var callback = function () {
return _this.conn.send(JSON.stringify(data));
};
if (this.isConnected()) {
callback();
} else {
this.sendBuffer.push(callback);
}
};
// Return the next message ref, accounting for overflows
Socket.prototype.makeRef = function makeRef() {
var newRef = this.ref + 1;
if (newRef === this.ref) {
this.ref = 0;
} else {
this.ref = newRef;
}
return this.ref.toString();
};
Socket.prototype.sendHeartbeat = function sendHeartbeat() {
this.push({ topic: "phoenix", event: "heartbeat", payload: {}, ref: this.makeRef() });
};
Socket.prototype.flushSendBuffer = function flushSendBuffer() {
if (this.isConnected() && this.sendBuffer.length > 0) {
this.sendBuffer.forEach(function (callback) {
return callback();
});
this.sendBuffer = [];
}
this.resetBufferTimer();
};
Socket.prototype.onConnMessage = function onConnMessage(rawMessage) {
this.log("message received:");
this.log(rawMessage);
var _JSON$parse = JSON.parse(rawMessage.data);
var topic = _JSON$parse.topic;
var event = _JSON$parse.event;
var payload = _JSON$parse.payload;
this.channels.filter(function (chan) {
return chan.isMember(topic);
}).forEach(function (chan) {
return chan.trigger(event, payload);
});
this.stateChangeCallbacks.message.forEach(function (callback) {
callback(topic, event, payload);
});
};
return Socket;
})();
var LongPoller = exports.LongPoller = (function () {
function LongPoller(endPoint) {
_classCallCheck(this, LongPoller);
this.retryInMs = 5000;
this.endPoint = null;
this.token = null;
this.sig = null;
this.skipHeartbeat = true;
this.onopen = function () {}; // noop
this.onerror = function () {}; // noop
this.onmessage = function () {}; // noop
this.onclose = function () {}; // noop
this.states = SOCKET_STATES;
this.upgradeEndpoint = this.normalizeEndpoint(endPoint);
this.pollEndpoint = this.upgradeEndpoint + (/\/$/.test(endPoint) ? "poll" : "/poll");
this.readyState = this.states.connecting;
this.poll();
}
LongPoller.prototype.normalizeEndpoint = function normalizeEndpoint(endPoint) {
return endPoint.replace("ws://", "http://").replace("wss://", "https://");
};
LongPoller.prototype.endpointURL = function endpointURL() {
return this.pollEndpoint + ("?token=" + encodeURIComponent(this.token) + "&sig=" + encodeURIComponent(this.sig));
};
LongPoller.prototype.closeAndRetry = function closeAndRetry() {
this.close();
this.readyState = this.states.connecting;
};
LongPoller.prototype.ontimeout = function ontimeout() {
this.onerror("timeout");
this.closeAndRetry();
};
LongPoller.prototype.poll = function poll() {
var _this = this;
if (!(this.readyState === this.states.open || this.readyState === this.states.connecting)) {
return;
}
Ajax.request("GET", this.endpointURL(), "application/json", null, this.timeout, this.ontimeout.bind(this), function (resp) {
if (resp) {
var status = resp.status;
var token = resp.token;
var sig = resp.sig;
var messages = resp.messages;
_this.token = token;
_this.sig = sig;
} else {
var status = 0;
}
switch (status) {
case 200:
messages.forEach(function (msg) {
return _this.onmessage({ data: JSON.stringify(msg) });
});
_this.poll();
break;
case 204:
_this.poll();
break;
case 410:
_this.readyState = _this.states.open;
_this.onopen();
_this.poll();
break;
case 0:
case 500:
_this.onerror();
_this.closeAndRetry();
break;
default:
throw "unhandled poll status " + status;
}
});
};
LongPoller.prototype.send = function send(body) {
var _this = this;
Ajax.request("POST", this.endpointURL(), "application/json", body, this.timeout, this.onerror.bind(this, "timeout"), function (resp) {
if (!resp || resp.status !== 200) {
_this.onerror(status);
_this.closeAndRetry();
}
});
};
LongPoller.prototype.close = function close(code, reason) {
this.readyState = this.states.closed;
this.onclose();
};
return LongPoller;
})();
var Ajax = exports.Ajax = (function () {
function Ajax() {
_classCallCheck(this, Ajax);
}
Ajax.request = function request(method, endPoint, accept, body, timeout, ontimeout, callback) {
if (window.XDomainRequest) {
var req = new XDomainRequest(); // IE8, IE9
this.xdomainRequest(req, method, endPoint, body, timeout, ontimeout, callback);
} else {
var req = window.XMLHttpRequest ? new XMLHttpRequest() : // IE7+, Firefox, Chrome, Opera, Safari
new ActiveXObject("Microsoft.XMLHTTP"); // IE6, IE5
this.xhrRequest(req, method, endPoint, accept, body, timeout, ontimeout, callback);
}
};
Ajax.xdomainRequest = function xdomainRequest(req, method, endPoint, body, timeout, ontimeout, callback) {
var _this = this;
req.timeout = timeout;
req.open(method, endPoint);
req.onload = function () {
var response = _this.parseJSON(req.responseText);
callback && callback(response);
};
if (ontimeout) {
req.ontimeout = ontimeout;
}
// Work around bug in IE9 that requires an attached onprogress handler
req.onprogress = function () {};
req.send(body);
};
Ajax.xhrRequest = function xhrRequest(req, method, endPoint, accept, body, timeout, ontimeout, callback) {
var _this = this;
req.timeout = timeout;
req.open(method, endPoint, true);
req.setRequestHeader("Content-Type", accept);
req.onerror = function () {
callback && callback(null);
};
req.onreadystatechange = function () {
if (req.readyState === _this.states.complete && callback) {
var response = _this.parseJSON(req.responseText);
callback(response);
}
};
if (ontimeout) {
req.ontimeout = ontimeout;
}
req.send(body);
};
Ajax.parseJSON = function parseJSON(resp) {
return resp && resp !== "" ? JSON.parse(resp) : null;
};
return Ajax;
})();
Ajax.states = { complete: 4 };
exports.__esModule = true;
}});
if(typeof(window) === 'object' && !window.Phoenix){ window.Phoenix = require('phoenix') };


@ -0,0 +1,25 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="description" content="">
<meta name="author" content="">
<title>Oho! Katso ällistyttävät clickbait-tilastot!</title>
<link rel="stylesheet" href="<%= static_path(@conn, "/css/app.css") %>">
<script type="text/javascript" src="<%= static_path(@conn, "/js/jquery.js") %>"></script>
<script type="text/javascript" src="<%= static_path(@conn, "/js/vendor.js") %>"></script>
<script type="text/javascript" src="<%= static_path(@conn, "/js/dygraph.js") %>"></script>
</head>
<body>
<div class="container" role="main">
<%= @inner %>
</div> <!-- /container -->
<script type="text/javascript" src="<%= static_path(@conn, "/js/app.js") %>"></script>
</body>
</html>


@ -1,35 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="description" content="">
<meta name="author" content="">
<title>Oho! Katso kuvat ja tilastot! Arvaatko, mikä lehti on surkein?</title>
<link rel="stylesheet" href="<%= static_path(@conn, "/css/app.css") %>">
</head>
<body>
<div class="container">
<div class="header">
<h1>
Oho! Katso kuvat!
</h1>
</div>
<p class="alert alert-info"><%= get_flash(@conn, :info) %></p>
<p class="alert alert-danger"><%= get_flash(@conn, :error) %></p>
<%= @inner %>
<div class="footer">
<p>© Mikko Ahlroth 2015</p>
</div>
</div> <!-- /container -->
<script src="<%= static_path(@conn, "/js/app.js") %>"></script>
<script>require("web/static/js/app")</script>
</body>
</html>


@ -1,10 +1,98 @@
<div class="jumbotron">
<h2>Onko tämä Internetin paras sivusto?</h2>
<p class="lead">Nyt se on tutkittu! Tämä outo sivusto selvittää, mikä nykyjournalismissa on vikana. Vai onko?</p>
<h1>
Huhhuh! Et usko, kuinka pitkälle nämä lehdet menivät saadakseen klikkauksia!
</h1>
<p>
Katso järkyttävät tilastot alta.
</p>
</div>
<div class="row">
<div class="col-xs-12">
<h1>Tämän hetken lööpeimmät</h1>
</div>
</div>
<h2>
Mitä tämä on?
</h2>
<p>
Verkkojournalismiin on nykyisin syöpynyt huolestuttava ilmiö, <em>clickbait</em>. Clickbait tarkoittaa lukijoiden
houkuttelemista otsikoilla, jotka dramatisoivat ja revittelevät, vaikka itse uutinen olisikin täysin tavanomainen.
Samalla otsikot välttävät paljastamasta, mistä asiassa on oikeasti kyse, jotta lukijan olisi pakko avata artikkeli
selvittääkseen.
</p>
<p>
Tämä sivusto analysoi joukon suomalaisia verkkolehtiä käyttäen huipputieteellistä algoritmia. Se etsii valtakunnan
pahimmat clickbait-otsikot ja surkeimmat lehdet, jotta sinä voit tehdä järkeviä päätöksiä siitä, minkä lukemiseen
käytät aikaasi.
</p>
<hr />
<h2>
Lehtien clickbait-pisteiden historia
</h2>
<div id="all_series" style="width:100%; height:400px;"></div>
<hr />
<h2>
Tämän hetken 💩:t otsikot
</h2>
<table class="table" id="top_title_table">
<thead>
<tr>
<!--<th></th>-->
<th>Pst</th>
<th>Lehti</th>
<th>Otsikko</th>
<th>Syyt</th>
<th>Haettu</th>
</tr>
</thead>
<tfoot>
<tr>
<!--<th></th>-->
<th>Pst</th>
<th>Lehti</th>
<th>Otsikko</th>
<th>Syyt</th>
<th>Haettu</th>
</tr>
</tfoot>
<tbody>
<%= for {title, i} <- Enum.with_index @top_titles do %>
<tr>
<!--<td><%= i + 1 %></td>-->
<td><%= title.total_score %></td>
<td><%= title.fetch.magazine.name %></td>
<td><%= title.title %></td>
<td>
<%= for title_score <- title.title_scores do %>
<%= title_score.score_type %>: <%= title_score.score_amount %><br />
<% end %>
</td>
<td><%= title.fetch.inserted_at %></td>
</tr>
<% end %>
</tbody>
</table>
<script type="text/javascript">
var $ = require('jquery');
var Dygraph = require('dygraphs');
var elem = document.getElementById('all_series');
var data = <%= raw @all_series %>;
data.map(function (elem) {
elem[0] = new Date(elem[0]);
return elem;
});
var labels = <%= raw @magazines %>;
labels = ["x"].concat(labels);
new Dygraph(elem, data, {
labels: labels
});
</script>


@ -2,11 +2,11 @@ defmodule Katso.ErrorView do
use Katso.Web, :view
def render("404.html", _assigns) do
"Page not found - 404"
"Page not found"
end
def render("500.html", _assigns) do
"Server internal error - 500"
"Server internal error"
end
# In case no render clause matches or no


@ -19,6 +19,9 @@ defmodule Katso.Web do
def model do
quote do
use Ecto.Model
import Ecto.Changeset
import Ecto.Query, only: [from: 1, from: 2]
end
end
@ -26,12 +29,10 @@ defmodule Katso.Web do
quote do
use Phoenix.Controller
# Alias the data repository and import query/model functions
alias Katso.Repo
import Ecto.Model
import Ecto.Query, only: [from: 2]
import Ecto.Query, only: [from: 1, from: 2]
# Import URL helpers from the router
import Katso.Router.Helpers
end
end
@ -41,13 +42,18 @@ defmodule Katso.Web do
use Phoenix.View, root: "web/templates"
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_flash: 2]
# Import URL helpers from the router
import Katso.Router.Helpers
import Phoenix.Controller, only: [get_csrf_token: 0, get_flash: 2, view_module: 1]
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
import Katso.Router.Helpers
end
end
def router do
quote do
use Phoenix.Router
end
end
@ -55,11 +61,9 @@ defmodule Katso.Web do
quote do
use Phoenix.Channel
# Alias the data repository and import query/model functions
alias Katso.Repo
import Ecto.Model
import Ecto.Query, only: [from: 2]
import Ecto.Query, only: [from: 1, from: 2]
end
end