Initial done

This commit is contained in:
Mikko Ahlroth 2015-04-18 23:03:53 +03:00
commit 8d0806f6f0
39 changed files with 2138 additions and 0 deletions

16
.gitignore vendored Normal file
View file

@ -0,0 +1,16 @@
# Mix artifacts
/_build
/deps
/*.ez
# Generated on crash by the VM
erl_crash.dump
# Static artifacts
/node_modules
# Since we are building js and css from web/static,
# we ignore priv/static/{css,js}. You may want to
# comment this depending on your deployment strategy.
/priv/static/css
/priv/static/js

8
README.md Normal file
View file

@ -0,0 +1,8 @@
# Katso
To start your new Phoenix application:
1. Install dependencies with `mix deps.get`
2. Start Phoenix endpoint with `mix phoenix.server`
Now you can visit `localhost:4000` from your browser.

39
brunch-config.js Normal file
View file

@ -0,0 +1,39 @@
// Brunch build configuration for the project's static assets
// (JavaScript, stylesheets and templates under web/static).
exports.config = {
  // See http://brunch.io/#documentation for docs.
  files: {
    javascripts: {
      joinTo: 'js/app.js'
      // To change the order of concatenation of files, explicitly mention here
      // https://github.com/brunch/brunch/tree/stable/docs#concatenation
      // order: {
      //   before: [
      //     'web/static/vendor/js/jquery-2.1.1.js',
      //     'web/static/vendor/js/bootstrap.min.js'
      //   ]
      // }
    },
    stylesheets: {
      joinTo: 'css/app.css'
    },
    templates: {
      joinTo: 'js/app.js'
    }
  },

  // Phoenix paths configuration
  paths: {
    // Which directories to watch
    watched: ["web/static", "test/static"],
    // Where to compile files to
    public: "priv/static"
  },

  // Configure your plugins
  plugins: {
    ES6to5: {
      // Do not use ES6 compiler in vendor code
      ignore: [/^(web\/static\/vendor)/]
    }
  }
};

24
config/config.exs Normal file
View file

@ -0,0 +1,24 @@
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config

# Configures the endpoint.
# NOTE(review): secret_key_base is committed to version control here —
# consider moving it to an untracked secrets file and rotating the key.
config :katso, Katso.Endpoint,
  url: [host: "localhost"],
  root: Path.expand("..", __DIR__),
  secret_key_base: "AwltP5KYMmwNj+7/UP47rHeKMX16cxP7uv0Csr+PoIZEFN2o090mzVvLbcvJeld1",
  debug_errors: false,
  pubsub: [name: Katso.PubSub,
           adapter: Phoenix.PubSub.PG2]

# Configures Elixir's Logger
config :logger, :console,
  format: "$time $metadata[$level] $message\n",
  metadata: [:request_id]

# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"

35
config/dev.exs Normal file
View file

@ -0,0 +1,35 @@
use Mix.Config

# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :katso, Katso.Endpoint,
  http: [port: 4000],
  debug_errors: true,
  code_reloader: true,
  cache_static_lookup: false,
  watchers: [node: ["node_modules/brunch/bin/brunch", "watch"]]

# Watch static and templates for browser reloading.
config :katso, Katso.Endpoint,
  live_reload: [
    patterns: [
      ~r{priv/static/.*(js|css|png|jpeg|jpg|gif)$},
      ~r{web/views/.*(ex)$},
      ~r{web/templates/.*(eex)$}
    ]
  ]

# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"

# Configure your database (local development credentials only).
config :katso, Katso.Repo,
  adapter: Ecto.Adapters.Postgres,
  username: "katso",
  password: "katso",
  database: "katso",
  hostname: "localhost"

45
config/prod.exs Normal file
View file

@ -0,0 +1,45 @@
use Mix.Config

# For production, we configure the host to read the PORT
# from the system environment. Therefore, you will need
# to set PORT=80 before running your server.
#
# You should also configure the url host to something
# meaningful, we use this information when generating URLs.
config :katso, Katso.Endpoint,
  http: [port: {:system, "PORT"}],
  url: [host: "example.com"]

# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section:
#
#  config:katso, Katso.Endpoint,
#    ...
#    https: [port: 443,
#            keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
#            certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables point to a file on
# disk for the key and cert.

# Do not print debug messages in production
config :logger, level: :info

# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
#     config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
#     config :katso, Katso.Endpoint, server: true
#

# Finally import the config/prod.secret.exs
# which should be versioned separately.
import_config "prod.secret.exs"

14
config/prod.secret.exs Normal file
View file

@ -0,0 +1,14 @@
use Mix.Config

# In this file, we keep production configuration that
# you likely want to automate and keep it away from
# your version control system.
#
# NOTE(review): despite the note above, this file — including the production
# secret_key_base and database credentials — appears to be committed in this
# commit. The secrets should be rotated and the file untracked.
config :katso, Katso.Endpoint,
  secret_key_base: "Pqf8JsBMaVsywaKVbsJFtsPPySK94cWLrvEgwENe37SBW5EiDO4J3F7AaZ5luBCY"

# Configure your database
config :katso, Katso.Repo,
  adapter: Ecto.Adapters.Postgres,
  username: "postgres",
  password: "postgres",
  database: "katso_prod"

19
config/test.exs Normal file
View file

@ -0,0 +1,19 @@
use Mix.Config

# We don't run a server during test. If one is required,
# you can enable the server option below.
config :katso, Katso.Endpoint,
  http: [port: 4001],
  server: false

# Print only warnings and errors during test
config :logger, level: :warn

# Configure your database. Pool size 1 with no overflow so every test
# shares the single transactional connection.
config :katso, Katso.Repo,
  adapter: Ecto.Adapters.Postgres,
  username: "postgres",
  password: "postgres",
  database: "katso_test",
  size: 1,
  max_overflow: false

30
lib/katso.ex Normal file
View file

@ -0,0 +1,30 @@
defmodule Katso do
  @moduledoc """
  OTP application entry point. Starts the supervision tree containing the
  Phoenix endpoint and the Ecto repository.
  """

  use Application

  # See http://elixir-lang.org/docs/stable/elixir/Application.html
  # for more information on OTP Applications
  def start(_type, _args) do
    import Supervisor.Spec, warn: false

    children = [
      # Start the endpoint when the application starts
      supervisor(Katso.Endpoint, []),
      # Start the Ecto repository
      worker(Katso.Repo, []),
      # Here you could define other workers and supervisors as children
      # worker(Katso.Worker, [arg1, arg2, arg3]),
    ]

    # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
    # for other strategies and supported options
    opts = [strategy: :one_for_one, name: Katso.Supervisor]
    Supervisor.start_link(children, opts)
  end

  # Tell Phoenix to update the endpoint configuration
  # whenever the application is updated.
  def config_change(changed, _new, removed) do
    Katso.Endpoint.config_change(changed, removed)
    :ok
  end
end

33
lib/katso/endpoint.ex Normal file
View file

@ -0,0 +1,33 @@
defmodule Katso.Endpoint do
  @moduledoc """
  Phoenix HTTP endpoint: the plug pipeline every request passes through
  before reaching `Katso.Router`.
  """

  use Phoenix.Endpoint, otp_app: :katso

  # Serve at "/" the given assets from "priv/static" directory
  plug Plug.Static,
    at: "/", from: :katso,
    only: ~w(css images js favicon.ico robots.txt)

  # Code reloading can be explicitly enabled under the
  # :code_reloader configuration of your endpoint.
  if code_reloading? do
    plug Phoenix.LiveReloader
    plug Phoenix.CodeReloader
  end

  plug Plug.Logger

  # Parse urlencoded, multipart and JSON request bodies; any content type
  # is passed through unparsed (pass: ["*/*"]).
  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Poison

  plug Plug.MethodOverride
  plug Plug.Head

  # Cookie-based session storage, signed and encrypted with the salts below.
  plug Plug.Session,
    store: :cookie,
    key: "_katso_key",
    signing_salt: "hybxwdCF",
    encryption_salt: "rGum4O3j"

  plug :router, Katso.Router
end

387
lib/katso/pageanalyzer.ex Normal file
View file

@ -0,0 +1,387 @@
defmodule Katso.PageAnalyzer do
  @moduledoc """
  This module contains functionality to analyze a single page. First it scrapes the page using Scraper and then
  calculates the scores using TitleAnalyzer.
  """

  alias Katso.Repo
  alias Katso.Magazine
  alias Katso.Fetch
  alias Katso.FetchScore
  alias Katso.Title
  alias Katso.TitleScore

  import Ecto.Query, only: [from: 2]

  # Static registry of sites to analyze. Each entry maps a site key to the
  # site's human readable name, its front page URL and the CSS selector
  # rules used by Katso.Scraper to pick headline elements from the page.
  @sites %{
    iltalehti: %{
      name: "Iltalehti",
      url: "http://www.iltalehti.fi/",
      rules: [
        ".otsikko"
      ]
    },
    iltasanomat: %{
      name: "Ilta-Sanomat",
      url: "http://www.iltasanomat.fi/",
      rules: [
        "h2",
        "h3",
        "a div p",
        "a div.content"
      ]
    },
    aamulehti: %{
      name: "Aamulehti",
      url: "http://www.aamulehti.fi/",
      rules: [
        "h2 a",
        "h3 a"
      ]
    },
    hs: %{
      name: "Helsingin Sanomat",
      url: "http://www.hs.fi/",
      rules: [
        "h2 a",
        "h3 a",
        "li div a"
      ]
    },
    ts: %{
      name: "Turun Sanomat",
      url: "http://www.ts.fi/",
      rules: [
        "h1"
      ]
    },
    ksml: %{
      name: "Keskisuomalainen",
      url: "http://www.ksml.fi/",
      rules: [
        "h1 a",
        "h2 a",
        "h3 a"
      ]
    },
    kymensanomat: %{
      name: "Kymen Sanomat",
      url: "http://www.kymensanomat.fi/",
      rules: [
        "h1 a",
        "h2 a",
        "div.news-title a"
      ]
    },
    etelasaimaa: %{
      name: "Etelä-Saimaa",
      url: "http://www.esaimaa.fi/",
      rules: [
        "h1 a",
        "h2 a",
        "div.news-title a"
      ]
    },
    kouvolansanomat: %{
      name: "Kouvolan Sanomat",
      url: "http://www.kouvolansanomat.fi/",
      rules: [
        "h1 a",
        "h2 a",
        "div.news-title a"
      ]
    },
    ess: %{
      name: "Etelä-Suomen Sanomat",
      url: "http://www.ess.fi/",
      rules: [
        "h1 a"
      ]
    },
    forssa: %{
      name: "Forssan Lehti",
      url: "http://www.forssanlehti.fi/",
      rules: [
        "h1 a",
        "h3 a"
      ]
    },
    hameensanomat: %{
      name: "Hämeen Sanomat",
      url: "http://www.hameensanomat.fi/",
      rules: [
        "h1 a"
      ]
    },
    lapinkansa: %{
      name: "Lapin Kansa",
      url: "http://www.lapinkansa.fi/",
      rules: [
        "h2 a",
        "li a"
      ]
    },
    yle: %{
      name: "Yle Uutiset",
      url: "http://yle.fi/uutiset/",
      rules: [
        "h1 a"
      ]
    },
    karjalainen: %{
      name: "Karjalainen",
      url: "http://www.karjalainen.fi/",
      rules: [
        "h1 a",
        "h2 a",
        "h3 a",
        "h4 a"
      ]
    },
    kangasalan_sanomat: %{
      name: "Kangasalan Sanomat",
      url: "http://kangasalansanomat.fi/",
      rules: [
        "h1",
        "h2"
      ]
    },
    kaleva: %{
      name: "Kaleva",
      url: "http://www.kaleva.fi/",
      rules: [
        "dd a",
        "h2"
      ]
    },
    mtv: %{
      name: "MTV Uutiset",
      url: "http://www.mtv.fi/uutiset",
      rules: [
        "h2",
        "div.related a",
        "li a p",
        "li a span",
        "p.headline"
      ]
    },
    uusisuomi: %{
      name: "Uusi Suomi",
      url: "http://www.uusisuomi.fi/",
      rules: [
        "h2 a",
        "h4 a"
      ]
    },
    ilkka: %{
      name: "Ilkka",
      url: "http://www.ilkka.fi/",
      rules: [
        "h1 a"
      ]
    },
    maaseuduntulevaisuus: %{
      name: "Maaseudun Tulevaisuus",
      url: "http://www.maaseuduntulevaisuus.fi/",
      rules: [
        "h2 a",
        "span.title"
      ]
    },
    savonsanomat: %{
      name: "Savon Sanomat",
      url: "http://www.savonsanomat.fi/",
      rules: [
        "h1 a",
        "h2 a",
        "div.media-body a"
      ]
    },
    # taloussanomat: %{
    #   name: "Taloussanomat",
    #   url: "http://www.taloussanomat.fi/",
    #   rules: [
    #     "h1 a",
    #     "h2 a",
    #     "h3 a"
    #   ]
    # },
    verkkouutiset: %{
      name: "Verkkouutiset",
      url: "http://www.verkkouutiset.fi/",
      rules: [
        "span.headline"
      ]
    },
  }

  @doc """
  Analyzes every configured site concurrently (one Task per site), then
  aggregates, prints and persists the results.
  """
  def analyze_all() do
    Map.keys(@sites)
    |> Enum.map(fn site_key -> Task.async Katso.PageAnalyzer, :analyze, [site_key] end)
    |> handle_responses
    |> calculate_scores
    |> reject_emptys
    |> print_scores
    |> store_data
  end

  @doc """
  Scrapes a single site and pairs every scraped title with its analysis
  result from `Katso.TitleAnalyzer`. Returns `{site_key, [{title, scores}]}`.
  """
  # NOTE(review): Katso.Scraper.scrape/1 returns :error on HTTP failure, which
  # would crash the Enum.map below (and thus the async task) — confirm whether
  # this let-it-crash behavior is intended.
  def analyze(site_key) do
    site = @sites[site_key]
    data = Katso.Scraper.scrape(site)
    |> Enum.map(fn title -> {title, Katso.TitleAnalyzer.analyze title} end)
    IO.puts "Analyzed " <> site.name
    {site_key, data}
  end

  # Collects the results of all analyze/1 tasks. Any message that does not
  # belong to one of the tasks is ignored; recursion ends when every task in
  # the list has replied. Result order is therefore completion order.
  def handle_responses(task_list, data \\ [])

  def handle_responses([], data), do: data

  def handle_responses(task_list, data) do
    receive do
      msg -> case Task.find task_list, msg do
        nil -> handle_responses task_list, data
        {result, task} ->
          data = [handle_response(result) | data]
          handle_responses List.delete(task_list, task), data
      end
    end
  end

  # Folds one site's per-title score lists into a totals map: total score,
  # title count, per-rule score sums (score_types keyword list) and the raw
  # {title, title_scores} matches. relative_score is filled in later by
  # calculate_scores/1.
  def handle_response({site_key, result}) do
    initial_scores = %{
      relative_score: 0,
      total_score: 0,
      score_types: [],
      total_titles: 0,
      matches: []
    }

    scores = result
    |> Enum.reduce(initial_scores, fn {title, title_scores}, total_scores ->
      title_scores
      |> Enum.reduce(total_scores, fn {score_key, _, score_amount}, acc ->
        acc
        |> Map.put(:score_types, Keyword.put(acc.score_types, score_key, Keyword.get(acc.score_types, score_key, 0) + score_amount))
        |> Map.put :total_score, acc.total_score + score_amount
      end)
      |> Map.put(:matches, [{title, title_scores} | total_scores.matches])
      |> Map.put :total_titles, total_scores.total_titles + 1
    end)

    {site_key, scores}
  end

  # Computes the relative score (total score per 100 titles, rounded to an
  # integer) for every site. Sites with zero scraped titles become nil and
  # are dropped by reject_emptys/1.
  def calculate_scores(data) do
    Enum.map data, fn {site_key, scores} ->
      case scores.total_titles do
        0 -> nil
        _ -> {site_key, %{scores | relative_score: ((scores.total_score / scores.total_titles * 100) |> Float.round |> trunc) }}
      end
    end
  end

  # Drops the nil placeholders produced by calculate_scores/1.
  def reject_emptys(data) do
    Enum.reject data, fn x -> x == nil end
  end

  # Prints "site: relative_score" for every site and passes the data through
  # unchanged, so it can sit in the middle of the analyze_all pipeline.
  def print_scores(data) do
    Enum.map data, fn {site_key, scores} ->
      IO.puts Atom.to_string(site_key) <> ": " <> Integer.to_string scores.relative_score
      {site_key, scores}
    end
  end

  # Persists the results: finds or creates the Magazine row for each site,
  # inserts a Fetch row with the aggregate scores, one FetchScore per rule,
  # and a Title + TitleScore rows for each title that matched at least one
  # rule. Returns :ok (Enum.each).
  def store_data(data) do
    Enum.each data, fn {site_key, scores} ->
      query = from m in Magazine,
              where: m.key == ^(Atom.to_string site_key)
      magazine = case Repo.one query do
        nil -> create_magazine @sites[site_key], site_key
        m -> m
      end

      fetch = create_fetch magazine, scores

      Enum.each scores.score_types, fn score_type ->
        create_fetch_score fetch, score_type
      end

      Enum.reject(scores.matches, fn {_, score_types} -> score_types == [] end)
      |> Enum.each fn {match, score_types} ->
        title = create_title fetch, {match, score_types}
        Enum.each score_types, fn score_type ->
          create_title_score title, score_type
        end
      end
    end
  end

  # Inserts a Magazine row for the given site definition and key.
  defp create_magazine(site, site_key) do
    Repo.insert Magazine.changeset %Magazine{}, %{
      name: site.name,
      key: Atom.to_string(site_key)
    }
  end

  # Inserts a Fetch row holding one scraping run's aggregate scores.
  defp create_fetch(magazine, scores) do
    Repo.insert Fetch.changeset %Fetch{}, %{
      total_score: scores.total_score,
      total_titles: scores.total_titles,
      relative_score: scores.relative_score,
      magazine_id: magazine.id
    }
  end

  # Inserts a Title row belonging to the given fetch.
  defp create_title(fetch, {title, _}) do
    Repo.insert Title.changeset %Title{}, %{
      title: title,
      fetch_id: fetch.id
    }
  end

  # Inserts the per-rule score breakdown for a single title.
  defp create_title_score(title, {score_type, score_words, score_amount}) do
    Repo.insert TitleScore.changeset %TitleScore{}, %{
      score_type: Atom.to_string(score_type),
      score_words: score_words,
      score_amount: score_amount,
      title_id: title.id
    }
  end

  # Inserts the per-rule score sum for a whole fetch.
  defp create_fetch_score(fetch, {score_type, score_amount}) do
    Repo.insert FetchScore.changeset %FetchScore{}, %{
      score_type: Atom.to_string(score_type),
      score_amount: score_amount,
      fetch_id: fetch.id
    }
  end
end

3
lib/katso/repo.ex Normal file
View file

@ -0,0 +1,3 @@
defmodule Katso.Repo do
  # Ecto repository for the :katso application; connection settings come
  # from the application environment (config/*.exs).
  use Ecto.Repo, otp_app: :katso
end

48
lib/katso/scraper.ex Normal file
View file

@ -0,0 +1,48 @@
defmodule Katso.Scraper do
  @moduledoc """
  This module stores the list of sites to scrape and their scraping rules.
  """

  # List of user agents to use; one will be picked randomly from this list
  # for every request to avoid trivial bot blocking.
  #
  # NOTE: this was previously a `@doc` attribute, but `@doc` placed before a
  # module attribute does not document it — it would attach to the next
  # function definition instead — so a plain comment is used here.
  @uas [
    "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/7046A194A",
    "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; AS; rv:11.0) like Gecko",
    "Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; Trident/4.0; InfoPath.2; SV1; .NET CLR 2.0.50727; WOW64)",
    "Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; Zune 4.0; InfoPath.3; MS-RTC LM 8; .NET4.0C; .NET4.0E)",
    "Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0",
    "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:24.0) Gecko/20100101 Firefox/24.0",
    "Opera/9.80 (X11; Linux i686; Ubuntu/14.10) Presto/2.12.388 Version/12.16"
  ]

  # Options for hackney
  @hackney follow_redirect: true

  @doc """
  Scrape readable lines from the given site using the site's scraping rules.

  Fetches `site.url` over HTTP and delegates to `scrape/2`. Returns a list
  of non-empty title strings, or `:error` if the request fails.
  """
  def scrape(site) do
    # Pick a random user agent for this request.
    ua = Enum.at(@uas, :random.uniform(Enum.count(@uas)) - 1)

    case HTTPoison.get site.url, [{:"User-Agent", ua}], [hackney: @hackney] do
      {:error, _} -> :error
      {:ok, %HTTPoison.Response{body: html}} -> scrape site, html
    end
  end

  @doc """
  Scrape readable lines from the given HTML document using the site's rules.

  Runs every selector rule through `do_scrape/3`, extracts the text of each
  matched element and drops titles that are empty after trimming.
  """
  def scrape(site, html) do
    parse_tree = Floki.parse html

    site.rules
    |> Enum.map_reduce([], fn rule, acc -> {nil, do_scrape(parse_tree, rule, acc)} end)
    |> elem(1)
    |> Enum.map(fn elem -> Katso.Utils.convert_utf8 Floki.text elem end)
    |> Enum.reject(fn elem -> String.strip(elem) == "" end)
  end

  @doc """
  Finds all elements matching `rule` and prepends them to the accumulated
  `results`.
  """
  def do_scrape(parse_tree, rule, results) do
    Floki.find(parse_tree, rule)
    |> Enum.concat results
  end
end

136
lib/katso/titleanalyzer.ex Normal file
View file

@ -0,0 +1,136 @@
defmodule Katso.TitleAnalyzer do
  @moduledoc """
  This module contains the tools for analyzing a single title or piece of text.
  """

  # The regex rules which are used to look up shitty stuff from text.
  # Also includes explanations and point ratings for them.
  #
  # Each rule has the keys:
  #   s: a human readable (Finnish) explanation of the rule,
  #   r: the regex that detects the pattern,
  #   p: points awarded per match.
  #
  # NOTE: this was previously a `@doc` attribute, but `@doc` placed before a
  # module attribute does not document it — it would attach to the next
  # function definition instead — so a plain comment is used here.
  @rules %{
    vau: %{
      s: "Vau! Oho! Ja kaikenlainen muu ihmettely.",
      r: ~r/\b(?:vau|oho|ohh?oh?|hups(?:is)?|huh)\b/iu,
      p: 1
    },
    nyt: %{
      s: "Nyt puhuu X! Nyt se on X!",
      r: ~r/\b(?:nyt se on|nyt puhu[uv])\b/iu,
      p: 1
    },
    pronominit: %{
      s: "Epämääräiset pronominit joilla vältellään kertomasta, mistä puhutaan. Hän, tämä, nämä jne.",
      r: ~r/
        \b(?:
          tämä|tässä|tällä|tästä|tänne|tähän|tälle|tämän|tältä|tätä|tällaista|
          tämänlaiset|tämänlaisia|tämänlaista|
          nämä|näissä|näillä|näistä|näihin|näille|näiden|näiltä|näitä|näin|
          hän|hänessä|hänellä|hänestä|häneen|hänelle|hänen|häneltä|häntä|
          he|heissä|heillä|heistä|heihin|heille|heidän|heiltä|heitä|heistä
        )(?:kin)?\b
      /ixu,
      p: 1
    },
    kysymys: %{
      s: "Kysymykset otsikoissa. Yleensä näihin vastaus on ”ei”.",
      r: ~r/\?/u,
      p: 1
    },
    huuto: %{
      s: "Huonoja otsikoita tehostetaan usein huutomerkillä!",
      r: ~r/!/u,
      p: 1
    },
    turhat_sanat: %{
      s: "Sanoja, jotka yleensä merkitsevät turhaa lööppiä ja joilla yritetään saada siitä dramaattisemman kuuloinen.",
      r: ~r/
        \b(?:
          kohu
          |raivostu
          |kauhunhetk
          |seksi
          |dramaatti
          |julkkis
          |bb\-
          |outo
          |rohke(?:at?|is)
          |paljast
          |raju
          |skandaal
          |mokat?\b
          |ällistyttä
          |mahtava
          |uskomat(?:t|o)
          |vihdoin
          |avautu
          |tilit(?:y|t)
          |hyytävi?ä
          |jäätävi?ä
          |et usko
          |kansa\b
          |testaa\b
          |arvaa
          |keksi(?:\b|tkö)
          |erikoi(?:s|n)
          |nolo(?:\b|a|i)
          |sensaatio
          |omitui
        )
      /iux,
      p: 1
    },
    katso: %{
      s: "Kehotus katsomaan jotain lisäsisältöä, joka lähes poikkeuksetta on hyödytöntä.",
      r: ~r/\bkatso\b/u,
      p: 1
    },
    some: %{
      s: "Sosiaalinen media on turhaa hömpötystä.",
      r: ~r/\bsome|twiitt|peukut(?:u|t)/,
      p: 1
    }
  }

  @doc """
  Analyzes the given string against every rule.

  Returns a list of `{rule_key, matched_strings, score}` tuples, one per
  rule that matched at least once; the score is the match count multiplied
  by the rule's point rating. Returns `[]` when nothing matches.
  """
  def analyze(str) do
    str = convert_utf8 str

    Map.keys(@rules)
    |> Enum.map(fn key ->
      rule = @rules[key]
      case run_re str, rule do
        nil -> nil
        matches -> {key, matches, Enum.count(matches) * rule.p}
      end
    end)
    |> Enum.reject(fn x -> x == nil end)
  end

  @doc """
  Runs the rule's regex against the string.

  Returns the list of full-match strings, or `nil` when there are none.
  """
  def run_re(str, %{r: r}) do
    case Regex.scan r, str do
      [] -> nil
      matches -> Enum.map matches, fn match -> Enum.at match, 0 end
    end
  end

  @doc """
  Ensures the given binary is valid UTF-8.

  An already valid string is returned unchanged; otherwise every single-byte
  (Latin-1) fragment is re-encoded as UTF-8.
  """
  # NOTE(review): duplicates Katso.Utils.convert_utf8/1 — consider delegating
  # to the shared helper.
  def convert_utf8(str) do
    case String.valid? str do
      true -> str
      false ->
        String.codepoints(str)
        |> Enum.reduce("", fn codepoint, acc ->
          acc <> case codepoint do
            <<byte>> -> <<byte :: utf8>>
            char -> char
          end
        end)
    end
  end
end

15
lib/katso/utils.ex Normal file
View file

@ -0,0 +1,15 @@
defmodule Katso.Utils do
  @moduledoc """
  Generic helpers shared across the application.
  """

  @doc """
  Ensures the given binary is valid UTF-8.

  A binary that is already valid UTF-8 is returned as-is. Otherwise each
  fragment reported by `String.codepoints/1` is inspected: single bytes
  (i.e. stray Latin-1 characters) are re-encoded as UTF-8, while valid
  multi-byte sequences are kept unchanged.
  """
  def convert_utf8(str) do
    if String.valid?(str) do
      str
    else
      for fragment <- String.codepoints(str), into: "" do
        case fragment do
          <<single_byte>> -> <<single_byte::utf8>>
          valid_sequence -> valid_sequence
        end
      end
    end
  end
end

41
mix.exs Normal file
View file

@ -0,0 +1,41 @@
defmodule Katso.Mixfile do
  use Mix.Project

  # Mix project definition: compiles lib/ and web/ (plus test/support in
  # the test environment) with the Phoenix compiler.
  def project do
    [app: :katso,
     version: "0.0.1",
     elixir: "~> 1.0",
     elixirc_paths: elixirc_paths(Mix.env),
     compilers: [:phoenix] ++ Mix.compilers,
     build_embedded: Mix.env == :prod,
     start_permanent: Mix.env == :prod,
     deps: deps]
  end

  # Configuration for the OTP application
  #
  # Type `mix help compile.app` for more information
  def application do
    [mod: {Katso, []},
     applications: [:phoenix, :cowboy, :logger, :ecto, :httpoison]]
  end

  # Specifies which paths to compile per environment
  defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
  defp elixirc_paths(_), do: ["lib", "web"]

  # Specifies your project dependencies
  #
  # Type `mix help deps` for examples and options
  #
  # NOTE(review): :excoder is pinned to an exact version from a private-style
  # git URL (ssh to Bitbucket) — builds require access to that repository.
  defp deps do
    [{:phoenix, "~> 0.11"},
     {:phoenix_ecto, "~> 0.3"},
     {:postgrex, ">= 0.0.0"},
     {:phoenix_live_reload, "~> 0.3"},
     {:cowboy, "~> 1.0"},
     {:excoder, "1.3.0", git: "git@bitbucket.org:Nicd/excoder.git"},
     {:floki, "~> 0.1"},
     {:httpoison, "~> 0.6"}
    ]
  end
end

21
mix.lock Normal file
View file

@ -0,0 +1,21 @@
%{"cowboy": {:hex, :cowboy, "1.0.0"},
"cowlib": {:hex, :cowlib, "1.0.1"},
"decimal": {:hex, :decimal, "1.1.0"},
"ecto": {:hex, :ecto, "0.10.2"},
"excoder": {:git, "git@bitbucket.org:Nicd/excoder.git", "274736c587c3b48fa4c86b86c94bf915977385d3", []},
"floki": {:hex, :floki, "0.1.1"},
"fs": {:hex, :fs, "0.9.1"},
"hackney": {:hex, :hackney, "1.1.0"},
"httpoison": {:hex, :httpoison, "0.6.2"},
"iconv": {:git, "https://github.com/erylee/erlang-iconv.git", "bd9ed8cc16ba3595fc6993dc2e6bf97273ce7f6a", []},
"idna": {:hex, :idna, "1.0.2"},
"mochiweb": {:hex, :mochiweb, "2.12.2"},
"phoenix": {:hex, :phoenix, "0.11.0"},
"phoenix_ecto": {:hex, :phoenix_ecto, "0.3.1"},
"phoenix_live_reload": {:hex, :phoenix_live_reload, "0.3.1"},
"plug": {:hex, :plug, "0.11.3"},
"poison": {:hex, :poison, "1.4.0"},
"poolboy": {:hex, :poolboy, "1.4.2"},
"postgrex": {:hex, :postgrex, "0.8.1"},
"ranch": {:hex, :ranch, "1.0.0"},
"ssl_verify_hostname": {:hex, :ssl_verify_hostname, "1.0.4"}}

13
package.json Normal file
View file

@ -0,0 +1,13 @@
{
"repository": {
},
"dependencies": {
"brunch": "git://github.com/brunch/brunch#5176b6b4bf70cd8cb9dad0058dd3e83e8d983218",
"babel-brunch": "^4.0.0",
"clean-css-brunch": ">= 1.0 < 1.8",
"css-brunch": ">= 1.0 < 1.8",
"javascript-brunch": ">= 1.0 < 1.8",
"sass-brunch": "git://github.com/brunch/sass-brunch.git#master",
"uglify-js-brunch": ">= 1.0 < 1.8"
}
}

View file

@ -0,0 +1,39 @@
defmodule Katso.Repo.Migrations.Init do
  use Ecto.Migration

  # Initial schema: a magazine has many fetches (one per scraping run);
  # a fetch stores aggregate scores and has many titles; both fetches and
  # titles get per-rule score breakdown rows.
  def change do
    # Scraped news sites, looked up by the unique string key.
    create table(:magazines) do
      add :name, :string
      add :key, :string
    end

    create index(:magazines, [:key], unique: true)

    # One scraping run of a magazine's front page with its aggregate scores.
    create table(:fetches) do
      add :magazine_id, references(:magazines)
      add :total_score, :integer
      add :total_titles, :integer
      add :relative_score, :integer
      timestamps
    end

    # Per-rule score sums for a whole fetch.
    create table(:fetch_scores) do
      add :fetch_id, references(:fetches)
      add :score_type, :string
      add :score_amount, :integer
    end

    # Individual scraped headlines belonging to a fetch.
    create table(:titles) do
      add :fetch_id, references(:fetches)
      add :title, :text
    end

    # Per-rule breakdown for one title, including the matched words.
    create table(:title_scores) do
      add :title_id, references(:titles)
      add :score_type, :string
      add :score_amount, :integer
      add :score_words, {:array, :text}
    end
  end
end

Binary file not shown.

After

Width:  |  Height:  |  Size: 14 KiB

View file

@ -0,0 +1,8 @@
defmodule Katso.PageControllerTest do
  use Katso.ConnCase

  # Smoke test: the front page responds and contains the expected text.
  # NOTE(review): the index template may have been customized away from the
  # Phoenix default — confirm this assertion matches web/templates content.
  test "GET /" do
    conn = get conn(), "/"
    assert conn.resp_body =~ "Welcome to Phoenix!"
  end
end

43
test/support/conn_case.ex Normal file
View file

@ -0,0 +1,43 @@
defmodule Katso.ConnCase do
  @moduledoc """
  This module defines the test case to be used by
  tests that require setting up a connection.

  Such tests rely on `Phoenix.ConnTest` and also
  imports other functionalities to make it easier
  to build and query models.

  Finally, if the test case interacts with the database,
  it cannot be async. For this reason, every test runs
  inside a transaction which is reset at the beginning
  of the test unless the test case is marked as async.
  """

  use ExUnit.CaseTemplate

  using do
    quote do
      # Import conveniences for testing with connections
      use Phoenix.ConnTest

      # Alias the data repository and import query/model functions
      alias Katso.Repo
      import Ecto.Model
      import Ecto.Query, only: [from: 2]

      # Import URL helpers from the router
      import Katso.Router.Helpers

      # The default endpoint for testing
      @endpoint Katso.Endpoint
    end
  end

  setup tags do
    # Roll the shared database transaction back between tests, unless this
    # case runs async (async cases must not touch the shared connection).
    unless tags[:async] do
      Ecto.Adapters.SQL.restart_test_transaction(Katso.Repo, [])
    end

    :ok
  end
end

6
test/test_helper.exs Normal file
View file

@ -0,0 +1,6 @@
# Boot ExUnit before any test files are loaded.
ExUnit.start

# Create the database, run migrations, and start the test transaction.
Mix.Task.run "ecto.create", ["--quiet"]
Mix.Task.run "ecto.migrate", ["--quiet"]
Ecto.Adapters.SQL.begin_test_transaction(Katso.Repo)

View file

@ -0,0 +1,9 @@
defmodule Katso.PageController do
  @moduledoc """
  Controller for the application front page.
  """

  use Katso.Web, :controller

  plug :action

  @doc """
  Renders the index template.
  """
  def index(conn, _params) do
    conn |> render("index.html")
  end
end

23
web/models/fetch.ex Normal file
View file

@ -0,0 +1,23 @@
defmodule Katso.Fetch do
  use Katso.Web, :model

  # One scraping run of a single magazine's front page, with its aggregate
  # scores. Has per-rule breakdowns (fetch_scores) and the scraped titles.
  schema "fetches" do
    field :total_score, :integer
    field :total_titles, :integer
    field :relative_score, :integer
    timestamps

    belongs_to :magazine, Katso.Magazine
    has_many :titles, Katso.Title
    has_many :fetch_scores, Katso.FetchScore
  end

  @required_fields ~w(total_score total_titles relative_score magazine_id)
  @optional_fields ~w()

  @doc """
  Creates a changeset based on the `model` and `params`.
  """
  # NOTE(review): the Phoenix generator of this era defaulted `params` to
  # `:empty`; passing `nil` to `cast/4` may not behave as intended — confirm
  # against the Ecto version in use.
  def changeset(model, params \\ nil) do
    model
    |> cast(params, @required_fields, @optional_fields)
  end
end

18
web/models/fetch_score.ex Normal file
View file

@ -0,0 +1,18 @@
defmodule Katso.FetchScore do
  use Katso.Web, :model

  # Per-rule score sum for a whole fetch (one row per rule that matched).
  schema "fetch_scores" do
    field :score_type, :string
    field :score_amount, :integer

    belongs_to :fetch, Katso.Fetch
  end

  @required_fields ~w(score_type score_amount fetch_id)
  @optional_fields ~w()

  @doc """
  Creates a changeset based on the `model` and `params`.
  """
  # NOTE(review): see Katso.Fetch — `params \\ nil` vs the generator's
  # `:empty` default; confirm cast/4 semantics for nil params.
  def changeset(model, params \\ nil) do
    model
    |> cast(params, @required_fields, @optional_fields)
  end
end

18
web/models/magazine.ex Normal file
View file

@ -0,0 +1,18 @@
defmodule Katso.Magazine do
  use Katso.Web, :model

  # A scraped news site; `key` is the unique string form of the site atom
  # used by Katso.PageAnalyzer.
  schema "magazines" do
    field :name, :string
    field :key, :string

    has_many :fetches, Katso.Fetch
  end

  @required_fields ~w(name key)
  @optional_fields ~w()

  @doc """
  Creates a changeset based on the `model` and `params`.
  """
  # NOTE(review): see Katso.Fetch — `params \\ nil` vs the generator's
  # `:empty` default; confirm cast/4 semantics for nil params.
  def changeset(model, params \\ nil) do
    model
    |> cast(params, @required_fields, @optional_fields)
  end
end

18
web/models/title.ex Normal file
View file

@ -0,0 +1,18 @@
defmodule Katso.Title do
  use Katso.Web, :model

  # A single scraped headline belonging to a fetch, with its per-rule
  # score breakdown rows.
  schema "titles" do
    field :title, :string

    belongs_to :fetch, Katso.Fetch
    has_many :title_scores, Katso.TitleScore
  end

  @required_fields ~w(title fetch_id)
  @optional_fields ~w()

  @doc """
  Creates a changeset based on the `model` and `params`.
  """
  # NOTE(review): see Katso.Fetch — `params \\ nil` vs the generator's
  # `:empty` default; confirm cast/4 semantics for nil params.
  def changeset(model, params \\ nil) do
    model
    |> cast(params, @required_fields, @optional_fields)
  end
end

19
web/models/title_score.ex Normal file
View file

@ -0,0 +1,19 @@
defmodule Katso.TitleScore do
  use Katso.Web, :model

  # Per-rule breakdown for a single title: the rule name, points awarded
  # and the words that matched the rule.
  schema "title_scores" do
    field :score_type, :string
    field :score_amount, :integer
    field :score_words, {:array, :string}

    belongs_to :title, Katso.Title
  end

  @required_fields ~w(score_type score_amount score_words title_id)
  @optional_fields ~w()

  @doc """
  Creates a changeset based on the `model` and `params`.
  """
  # NOTE(review): see Katso.Fetch — `params \\ nil` vs the generator's
  # `:empty` default; confirm cast/4 semantics for nil params.
  def changeset(model, params \\ nil) do
    model
    |> cast(params, @required_fields, @optional_fields)
  end
end

16
web/router.ex Normal file
View file

@ -0,0 +1,16 @@
defmodule Katso.Router do
  use Phoenix.Router

  # Default browser pipeline: HTML-only requests with session, flash and
  # CSRF protection.
  pipeline :browser do
    plug :accepts, ["html"]
    plug :fetch_session
    plug :fetch_flash
    plug :protect_from_forgery
  end

  scope "/", Katso do
    pipe_through :browser # Use the default browser stack

    get "/", PageController, :index
  end
end

81
web/static/css/app.scss Normal file

File diff suppressed because one or more lines are too long

10
web/static/js/app.js Normal file
View file

@ -0,0 +1,10 @@
// Front-end entry point, bundled into js/app.js by Brunch.
import {Socket} from "phoenix"

// Example Phoenix channel usage (currently disabled):
// let socket = new Socket("/ws")
// socket.join("topic:subtopic", {}, chan => {
// })

// Application namespace object; currently empty.
let App = {
}

export default App

763
web/static/vendor/phoenix.js vendored Normal file
View file

@ -0,0 +1,763 @@
(function(/*! Brunch !*/) {
'use strict';
var globals = typeof window !== 'undefined' ? window : global;
if (typeof globals.require === 'function') return;
var modules = {};
var cache = {};
var has = function(object, name) {
return ({}).hasOwnProperty.call(object, name);
};
var expand = function(root, name) {
var results = [], parts, part;
if (/^\.\.?(\/|$)/.test(name)) {
parts = [root, name].join('/').split('/');
} else {
parts = name.split('/');
}
for (var i = 0, length = parts.length; i < length; i++) {
part = parts[i];
if (part === '..') {
results.pop();
} else if (part !== '.' && part !== '') {
results.push(part);
}
}
return results.join('/');
};
var dirname = function(path) {
return path.split('/').slice(0, -1).join('/');
};
var localRequire = function(path) {
return function(name) {
var dir = dirname(path);
var absolute = expand(dir, name);
return globals.require(absolute, path);
};
};
var initModule = function(name, definition) {
var module = {id: name, exports: {}};
cache[name] = module;
definition(module.exports, localRequire(name), module);
return module.exports;
};
var require = function(name, loaderPath) {
var path = expand(name, '.');
if (loaderPath == null) loaderPath = '/';
if (has(cache, path)) return cache[path].exports;
if (has(modules, path)) return initModule(path, modules[path]);
var dirIndex = expand(path, './index');
if (has(cache, dirIndex)) return cache[dirIndex].exports;
if (has(modules, dirIndex)) return initModule(dirIndex, modules[dirIndex]);
throw new Error('Cannot find module "' + name + '" from '+ '"' + loaderPath + '"');
};
var define = function(bundle, fn) {
if (typeof bundle === 'object') {
for (var key in bundle) {
if (has(bundle, key)) {
modules[key] = bundle[key];
}
}
} else {
modules[bundle] = fn;
}
};
var list = function() {
var result = [];
for (var item in modules) {
if (has(modules, item)) {
result.push(item);
}
}
return result;
};
globals.require = require;
globals.require.define = define;
globals.require.register = define;
globals.require.list = list;
globals.require.brunch = true;
})();
require.define({'phoenix': function(exports, require, module){ "use strict";
var _classCallCheck = function (instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } };
var SOCKET_STATES = { connecting: 0, open: 1, closing: 2, closed: 3 };
var CHANNEL_EVENTS = {
close: "phx_close",
error: "phx_error",
join: "phx_join",
reply: "phx_reply",
leave: "phx_leave"
};
var Push = (function () {
// Initializes the Push
//
// chan - The Channel
// event - The event, ie `"phx_join"`
// payload - The payload, ie `{user_id: 123}`
// mergePush - The optional `Push` to merge hooks from
function Push(chan, event, payload, mergePush) {
var _this = this;
_classCallCheck(this, Push);
this.chan = chan;
this.event = event;
this.payload = payload || {};
this.receivedResp = null;
this.afterHooks = [];
this.recHooks = {};
this.sent = false;
if (mergePush) {
mergePush.afterHooks.forEach(function (hook) {
return _this.after(hook.ms, hook.callback);
});
for (var status in mergePush.recHooks) {
if (mergePush.recHooks.hasOwnProperty(status)) {
this.receive(status, mergePush.recHooks[status]);
}
}
}
}
Push.prototype.send = function send() {
var _this = this;
var ref = this.chan.socket.makeRef();
var refEvent = this.chan.replyEventName(ref);
this.chan.on(refEvent, function (payload) {
_this.receivedResp = payload;
_this.matchReceive(payload);
_this.chan.off(refEvent);
_this.cancelAfters();
});
this.startAfters();
this.sent = true;
this.chan.socket.push({
topic: this.chan.topic,
event: this.event,
payload: this.payload,
ref: ref
});
};
Push.prototype.receive = function receive(status, callback) {
if (this.receivedResp && this.receivedResp.status === status) {
callback(this.receivedResp.response);
}
this.recHooks[status] = callback;
return this;
};
Push.prototype.after = function after(ms, callback) {
var timer = null;
if (this.sent) {
timer = setTimeout(callback, ms);
}
this.afterHooks.push({ ms: ms, callback: callback, timer: timer });
return this;
};
// private
Push.prototype.matchReceive = function matchReceive(_ref) {
var status = _ref.status;
var response = _ref.response;
var ref = _ref.ref;
var callback = this.recHooks[status];
if (!callback) {
return;
}
if (this.event === CHANNEL_EVENTS.join) {
callback(this.chan);
} else {
callback(response);
}
};
Push.prototype.cancelAfters = function cancelAfters() {
this.afterHooks.forEach(function (hook) {
clearTimeout(hook.timer);
hook.timer = null;
});
};
Push.prototype.startAfters = function startAfters() {
this.afterHooks.map(function (hook) {
if (!hook.timer) {
hook.timer = setTimeout(function () {
return hook.callback();
}, hook.ms);
}
});
};
return Push;
})();
// Channel: a topic subscription multiplexed over a shared Socket.
// Tracks event bindings, manages the join push and its retry/after
// hooks, and automatically rejoins after transport errors.
var Channel = exports.Channel = (function () {
  function Channel(topic, message, callback, socket) {
    _classCallCheck(this, Channel);
    this.topic = topic;
    this.message = message; // payload sent with the join push
    this.callback = callback;
    this.socket = socket;
    this.bindings = []; // {event, callback} pairs registered via on()
    this.afterHooks = [];
    this.recHooks = {};
    this.joinPush = new Push(this, CHANNEL_EVENTS.join, this.message);
    this.reset();
  }
  // Registers a timeout hook on the join push. Chainable.
  Channel.prototype.after = function after(ms, callback) {
    this.joinPush.after(ms, callback);
    return this;
  };
  // Registers a reply-status hook on the join push. Chainable.
  Channel.prototype.receive = function receive(status, callback) {
    this.joinPush.receive(status, callback);
    return this;
  };
  // Clears local bindings and re-sends the join push.
  Channel.prototype.rejoin = function rejoin() {
    this.reset();
    this.joinPush.send();
  };
  Channel.prototype.onClose = function onClose(callback) {
    this.on(CHANNEL_EVENTS.close, callback);
  };
  // Runs `callback` on channel errors, then triggers a close event.
  Channel.prototype.onError = function onError(callback) {
    var _this = this;
    this.on(CHANNEL_EVENTS.error, function (reason) {
      callback(reason);
      _this.trigger(CHANNEL_EVENTS.close, "error");
    });
  };
  // Drops all bindings and re-registers the built-in ones: auto-rejoin
  // after an error (delayed by socket.reconnectAfterMs) and dispatch of
  // reply events to their per-ref event name.
  Channel.prototype.reset = function reset() {
    var _this = this;
    this.bindings = [];
    var newJoinPush = new Push(this, CHANNEL_EVENTS.join, this.message, this.joinPush);
    this.joinPush = newJoinPush;
    this.onError(function (reason) {
      setTimeout(function () {
        return _this.rejoin();
      }, _this.socket.reconnectAfterMs);
    });
    this.on(CHANNEL_EVENTS.reply, function (payload) {
      _this.trigger(_this.replyEventName(payload.ref), payload);
    });
  };
  Channel.prototype.on = function on(event, callback) {
    this.bindings.push({ event: event, callback: callback });
  };
  Channel.prototype.isMember = function isMember(topic) {
    return this.topic === topic;
  };
  // Removes every binding registered for `event`.
  Channel.prototype.off = function off(event) {
    this.bindings = this.bindings.filter(function (bind) {
      return bind.event !== event;
    });
  };
  // Invokes all callbacks bound to `triggerEvent` with `msg`.
  Channel.prototype.trigger = function trigger(triggerEvent, msg) {
    this.bindings.filter(function (bind) {
      return bind.event === triggerEvent;
    }).map(function (bind) {
      return bind.callback(msg);
    });
  };
  // Pushes `event` with `payload` to the server and returns the Push
  // so callers can chain receive()/after() hooks on it.
  Channel.prototype.push = function push(event, payload) {
    var pushEvent = new Push(this, event, payload);
    pushEvent.send();
    return pushEvent;
  };
  Channel.prototype.replyEventName = function replyEventName(ref) {
    return "chan_reply_" + ref;
  };
  // Leaves the channel; once the server acks, detaches the channel from
  // the socket and resets local state.
  Channel.prototype.leave = function leave() {
    var _this = this;
    return this.push(CHANNEL_EVENTS.leave).receive("ok", function () {
      _this.socket.leave(_this);
      // BUG FIX: was `chan.reset()` — `chan` is not defined in this scope
      // and threw a ReferenceError when the leave ack arrived.
      _this.reset();
    });
  };
  return Channel;
})();
var Socket = exports.Socket = (function () {
// Initializes the Socket
//
// endPoint - The string WebSocket endpoint, ie, "ws://example.com/ws",
// "wss://example.com"
// "/ws" (inherited host & protocol)
// opts - Optional configuration
// transport - The Websocket Transport, ie WebSocket, Phoenix.LongPoller.
// Defaults to WebSocket with automatic LongPoller fallback.
// heartbeatIntervalMs - The millisec interval to send a heartbeat message
// reconnectAfterMs - The millisec interval to reconnect after connection loss
// logger - The optional function for specialized logging, ie:
// `logger: function(msg){ console.log(msg) }`
// longpoller_timeout - The maximum timeout of a long poll AJAX request.
// Defaults to 20s (double the server long poll timer).
//
// For IE8 support use an ES5-shim (https://github.com/es-shims/es5-shim)
//
function Socket(endPoint) {
var opts = arguments[1] === undefined ? {} : arguments[1];
_classCallCheck(this, Socket);
this.states = SOCKET_STATES;
// Registered user callbacks per connection lifecycle event.
this.stateChangeCallbacks = { open: [], close: [], error: [], message: [] };
this.flushEveryMs = 50; // how often the outgoing send buffer is flushed
this.reconnectTimer = null;
this.channels = [];
this.sendBuffer = []; // queued send callbacks while disconnected
this.ref = 0; // monotonically increasing message ref counter
this.transport = opts.transport || window.WebSocket || LongPoller;
this.heartbeatIntervalMs = opts.heartbeatIntervalMs || 30000;
this.reconnectAfterMs = opts.reconnectAfterMs || 5000;
this.logger = opts.logger || function () {}; // noop
this.longpoller_timeout = opts.longpoller_timeout || 20000;
this.endPoint = this.expandEndpoint(endPoint);
this.resetBufferTimer();
}
// Returns "wss" when the page was served over https, "ws" otherwise.
Socket.prototype.protocol = function protocol() {
return location.protocol.match(/^https/) ? "wss" : "ws";
};
// Expands a relative endpoint ("/ws" or "//host/ws") into a fully
// qualified ws(s):// URL; absolute endpoints pass through unchanged.
Socket.prototype.expandEndpoint = function expandEndpoint(endPoint) {
if (endPoint.charAt(0) !== "/") {
return endPoint;
}
// Protocol-relative "//host/path": prepend only the scheme.
if (endPoint.charAt(1) === "/") {
return "" + this.protocol() + ":" + endPoint;
}
return "" + this.protocol() + "://" + location.host + "" + endPoint;
};
// Closes the underlying connection (suppressing its onclose handler so
// no reconnect logic fires), then invokes `callback` if given.
// `code`/`reason` are forwarded to the transport's close() when provided.
Socket.prototype.disconnect = function disconnect(callback, code, reason) {
if (this.conn) {
this.conn.onclose = function () {}; // noop
if (code) {
this.conn.close(code, reason || "");
} else {
this.conn.close();
}
this.conn = null;
}
callback && callback();
};
// Tears down any existing connection, then opens a fresh transport and
// wires its lifecycle events to this socket's handlers.
Socket.prototype.connect = function connect() {
var _this = this;
this.disconnect(function () {
_this.conn = new _this.transport(_this.endPoint);
_this.conn.timeout = _this.longpoller_timeout; // only used by LongPoller
_this.conn.onopen = function () {
return _this.onConnOpen();
};
_this.conn.onerror = function (error) {
return _this.onConnError(error);
};
_this.conn.onmessage = function (event) {
return _this.onConnMessage(event);
};
_this.conn.onclose = function (event) {
return _this.onConnClose(event);
};
});
};
// Restarts the periodic flush of buffered outgoing messages.
Socket.prototype.resetBufferTimer = function resetBufferTimer() {
var _this = this;
clearTimeout(this.sendBufferTimer);
this.sendBufferTimer = setTimeout(function () {
return _this.flushSendBuffer();
}, this.flushEveryMs);
};
// Logs the message. Override `this.logger` for specialized logging. noops by default
Socket.prototype.log = function log(msg) {
this.logger(msg);
};
// Registers callbacks for connection state change events
//
// Examples
//
// socket.onError function(error){ alert("An error occurred") }
//
Socket.prototype.onOpen = function onOpen(callback) {
this.stateChangeCallbacks.open.push(callback);
};
Socket.prototype.onClose = function onClose(callback) {
this.stateChangeCallbacks.close.push(callback);
};
Socket.prototype.onError = function onError(callback) {
this.stateChangeCallbacks.error.push(callback);
};
Socket.prototype.onMessage = function onMessage(callback) {
this.stateChangeCallbacks.message.push(callback);
};
// Transport opened: stop reconnect attempts, start the heartbeat
// (unless the transport opts out via skipHeartbeat, as LongPoller does),
// rejoin all channels and notify registered open callbacks.
Socket.prototype.onConnOpen = function onConnOpen() {
var _this = this;
clearInterval(this.reconnectTimer);
if (!this.conn.skipHeartbeat) {
clearInterval(this.heartbeatTimer);
this.heartbeatTimer = setInterval(function () {
return _this.sendHeartbeat();
}, this.heartbeatIntervalMs);
}
this.rejoinAll();
this.stateChangeCallbacks.open.forEach(function (callback) {
return callback();
});
};
// Transport closed: stop heartbeats and schedule reconnect attempts.
// NOTE(review): this is a setInterval, so connect() keeps retrying every
// reconnectAfterMs until onConnOpen clears the timer.
Socket.prototype.onConnClose = function onConnClose(event) {
var _this = this;
this.log("WS close:");
this.log(event);
clearInterval(this.reconnectTimer);
clearInterval(this.heartbeatTimer);
this.reconnectTimer = setInterval(function () {
return _this.connect();
}, this.reconnectAfterMs);
this.stateChangeCallbacks.close.forEach(function (callback) {
return callback(event);
});
};
// Transport error: log it and notify registered error callbacks.
Socket.prototype.onConnError = function onConnError(error) {
this.log("WS error:");
this.log(error);
this.stateChangeCallbacks.error.forEach(function (callback) {
return callback(error);
});
};
// Maps the transport readyState to a human-readable string;
// a missing connection reports "closed".
Socket.prototype.connectionState = function connectionState() {
switch (this.conn && this.conn.readyState) {
case this.states.connecting:
return "connecting";
case this.states.open:
return "open";
case this.states.closing:
return "closing";
default:
return "closed";
}
};
Socket.prototype.isConnected = function isConnected() {
return this.connectionState() === "open";
};
// Re-sends the join push for every registered channel.
Socket.prototype.rejoinAll = function rejoinAll() {
this.channels.forEach(function (chan) {
return chan.rejoin();
});
};
// Creates a Channel for `topic`, registers it, and joins immediately
// when already connected (otherwise the join happens on next open).
Socket.prototype.join = function join(topic, message, callback) {
var chan = new Channel(topic, message, callback, this);
this.channels.push(chan);
if (this.isConnected()) {
chan.rejoin();
}
return chan;
};
// Detaches every channel subscribed to `chan`'s topic from this socket.
Socket.prototype.leave = function leave(chan) {
this.channels = this.channels.filter(function (c) {
return !c.isMember(chan.topic);
});
};
// Serializes and sends `data` now when connected; otherwise buffers the
// send until flushSendBuffer runs on a live connection.
Socket.prototype.push = function push(data) {
var _this = this;
var callback = function () {
return _this.conn.send(JSON.stringify(data));
};
if (this.isConnected()) {
callback();
} else {
this.sendBuffer.push(callback);
}
};
// Return the next message ref, accounting for overflows
Socket.prototype.makeRef = function makeRef() {
var newRef = this.ref + 1;
// newRef === this.ref once the integer can no longer be incremented
// exactly; wrap back to 0 in that case.
if (newRef === this.ref) {
this.ref = 0;
} else {
this.ref = newRef;
}
return this.ref.toString();
};
Socket.prototype.sendHeartbeat = function sendHeartbeat() {
this.push({ topic: "phoenix", event: "heartbeat", payload: {}, ref: this.makeRef() });
};
// Drains the queued sends when connected, then re-arms the flush timer
// so buffering keeps working for future disconnects.
Socket.prototype.flushSendBuffer = function flushSendBuffer() {
if (this.isConnected() && this.sendBuffer.length > 0) {
this.sendBuffer.forEach(function (callback) {
return callback();
});
this.sendBuffer = [];
}
this.resetBufferTimer();
};
// Parses an incoming transport frame and fans it out to every channel
// subscribed to its topic, then to the global message callbacks.
Socket.prototype.onConnMessage = function onConnMessage(rawMessage) {
this.log("message received:");
this.log(rawMessage);
var _JSON$parse = JSON.parse(rawMessage.data);
var topic = _JSON$parse.topic;
var event = _JSON$parse.event;
var payload = _JSON$parse.payload;
this.channels.filter(function (chan) {
return chan.isMember(topic);
}).forEach(function (chan) {
return chan.trigger(event, payload);
});
this.stateChangeCallbacks.message.forEach(function (callback) {
callback(topic, event, payload);
});
};
return Socket;
})();
// LongPoller emulates the WebSocket transport interface (onopen/onclose/
// onerror/onmessage, send, close, readyState) on top of AJAX long polling.
// Used as a fallback when window.WebSocket is unavailable.
var LongPoller = exports.LongPoller = (function () {
  function LongPoller(endPoint) {
    _classCallCheck(this, LongPoller);
    this.retryInMs = 5000;
    this.endPoint = null;
    this.token = null; // session token returned by the server
    this.sig = null; // signature validating the token
    this.skipHeartbeat = true; // the poll loop itself keeps the session alive
    this.onopen = function () {}; // noop
    this.onerror = function () {}; // noop
    this.onmessage = function () {}; // noop
    this.onclose = function () {}; // noop
    this.states = SOCKET_STATES;
    this.upgradeEndpoint = this.normalizeEndpoint(endPoint);
    this.pollEndpoint = this.upgradeEndpoint + (/\/$/.test(endPoint) ? "poll" : "/poll");
    this.readyState = this.states.connecting;
    this.poll();
  }
  // Rewrites ws(s):// URLs into their http(s):// equivalents.
  LongPoller.prototype.normalizeEndpoint = function normalizeEndpoint(endPoint) {
    return endPoint.replace("ws://", "http://").replace("wss://", "https://");
  };
  // Poll URL carrying the current session token and signature.
  LongPoller.prototype.endpointURL = function endpointURL() {
    return this.pollEndpoint + ("?token=" + encodeURIComponent(this.token) + "&sig=" + encodeURIComponent(this.sig));
  };
  // Closes the session, then flags the poller as reconnecting so the next
  // poll() starts a fresh session.
  LongPoller.prototype.closeAndRetry = function closeAndRetry() {
    this.close();
    this.readyState = this.states.connecting;
  };
  LongPoller.prototype.ontimeout = function ontimeout() {
    this.onerror("timeout");
    this.closeAndRetry();
  };
  // One long-poll round trip. Response statuses:
  //   200   - messages delivered: dispatch them and poll again
  //   204   - server timed out with no messages: poll again
  //   410   - session established: mark open and poll again
  //   0/500 - network or server failure: report and retry
  LongPoller.prototype.poll = function poll() {
    var _this = this;
    if (!(this.readyState === this.states.open || this.readyState === this.states.connecting)) {
      return;
    }
    Ajax.request("GET", this.endpointURL(), "application/json", null, this.timeout, this.ontimeout.bind(this), function (resp) {
      var status = 0;
      var messages = [];
      if (resp) {
        status = resp.status;
        messages = resp.messages;
        _this.token = resp.token;
        _this.sig = resp.sig;
      }
      switch (status) {
        case 200:
          messages.forEach(function (msg) {
            // Each message is re-serialized so onmessage sees the same
            // {data: string} shape a real WebSocket event carries.
            return _this.onmessage({ data: JSON.stringify(msg) });
          });
          _this.poll();
          break;
        case 204:
          _this.poll();
          break;
        case 410:
          _this.readyState = _this.states.open;
          _this.onopen();
          _this.poll();
          break;
        case 0:
        case 500:
          _this.onerror();
          _this.closeAndRetry();
          break;
        default:
          throw "unhandled poll status " + status;
      }
    });
  };
  // POSTs `body` to the server; any non-200 outcome reports an error and
  // restarts the session.
  LongPoller.prototype.send = function send(body) {
    var _this = this;
    Ajax.request("POST", this.endpointURL(), "application/json", body, this.timeout, this.onerror.bind(this, "timeout"), function (resp) {
      if (!resp || resp.status !== 200) {
        // BUG FIX: previously called `_this.onerror(status)`, where `status`
        // was an undeclared free variable (resolving to window.status).
        // Report the failed response's actual HTTP status instead.
        _this.onerror(resp && resp.status);
        _this.closeAndRetry();
      }
    });
  };
  // `code`/`reason` are accepted for WebSocket API compatibility but unused.
  LongPoller.prototype.close = function close(code, reason) {
    this.readyState = this.states.closed;
    this.onclose();
  };
  return LongPoller;
})();
// Minimal static AJAX helper used by the LongPoller transport. Picks the
// request object the browser supports and parses JSON responses.
var Ajax = exports.Ajax = (function () {
  function Ajax() {
    _classCallCheck(this, Ajax);
  }
  // Fires an async `method` request at `endPoint`, invoking `callback`
  // with the parsed JSON response.
  Ajax.request = function request(method, endPoint, accept, body, timeout, ontimeout, callback) {
    if (window.XDomainRequest) {
      // IE8, IE9
      this.xdomainRequest(new XDomainRequest(), method, endPoint, body, timeout, ontimeout, callback);
    } else {
      var req = window.XMLHttpRequest ? new XMLHttpRequest() : // IE7+, Firefox, Chrome, Opera, Safari
        new ActiveXObject("Microsoft.XMLHTTP"); // IE6, IE5
      this.xhrRequest(req, method, endPoint, accept, body, timeout, ontimeout, callback);
    }
  };
  Ajax.xdomainRequest = function xdomainRequest(req, method, endPoint, body, timeout, ontimeout, callback) {
    var self = this;
    req.timeout = timeout;
    req.open(method, endPoint);
    if (ontimeout) {
      req.ontimeout = ontimeout;
    }
    // Work around bug in IE9 that requires an attached onprogress handler
    req.onprogress = function () {};
    req.onload = function () {
      if (callback) {
        callback(self.parseJSON(req.responseText));
      }
    };
    req.send(body);
  };
  Ajax.xhrRequest = function xhrRequest(req, method, endPoint, accept, body, timeout, ontimeout, callback) {
    var self = this;
    req.timeout = timeout;
    req.open(method, endPoint, true);
    req.setRequestHeader("Content-Type", accept);
    req.onerror = function () {
      if (callback) {
        callback(null);
      }
    };
    req.onreadystatechange = function () {
      if (req.readyState === self.states.complete && callback) {
        callback(self.parseJSON(req.responseText));
      }
    };
    if (ontimeout) {
      req.ontimeout = ontimeout;
    }
    req.send(body);
  };
  // Parses a JSON body; empty or missing bodies yield null.
  Ajax.parseJSON = function parseJSON(resp) {
    if (!resp || resp === "") {
      return null;
    }
    return JSON.parse(resp);
  };
  return Ajax;
})();
// XMLHttpRequest DONE readyState.
Ajax.states = { complete: 4 };
exports.__esModule = true;
}});
if(typeof(window) === 'object' && !window.Phoenix){ window.Phoenix = require('phoenix') };

View file

@ -0,0 +1,35 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="description" content="">
<meta name="author" content="">
<title>Oho! Katso kuvat ja tilastot! Arvaatko, mikä lehti on surkein?</title>
<link rel="stylesheet" href="<%= static_path(@conn, "/css/app.css") %>">
</head>
<body>
<div class="container">
<div class="header">
<h1>
Oho! Katso kuvat!
</h1>
</div>
<p class="alert alert-info"><%= get_flash(@conn, :info) %></p>
<p class="alert alert-danger"><%= get_flash(@conn, :error) %></p>
<%= @inner %>
<div class="footer">
<p>© Mikko Ahlroth 2015</p>
</div>
</div> <!-- /container -->
<script src="<%= static_path(@conn, "/js/app.js") %>"></script>
<script>require("web/static/js/app")</script>
</body>
</html>

View file

@ -0,0 +1,10 @@
<div class="jumbotron">
<h2>Onko tämä Internetin paras sivusto?</h2>
<p class="lead">Nyt se on tutkittu! Tämä outo sivusto selvittää, mikä nykyjournalismissa on vikana. Vai onko?</p>
</div>
<div class="row">
<div class="col-xs-12">
<h1>Tämän hetken lööpeimmät</h1>
</div>
</div>

17
web/views/error_view.ex Normal file
View file

@ -0,0 +1,17 @@
defmodule Katso.ErrorView do
  @moduledoc """
  Renders error responses (404/500) as plain strings.
  """
  use Katso.Web, :view

  def render("404.html", _assigns), do: "Page not found - 404"

  def render("500.html", _assigns), do: "Server internal error - 500"

  # In case no render clause matches or no
  # template is found, let's render it as 500
  def template_not_found(_template, assigns), do: render("500.html", assigns)
end

3
web/views/layout_view.ex Normal file
View file

@ -0,0 +1,3 @@
defmodule Katso.LayoutView do
@moduledoc """
View module backing the shared layout templates. Helper functions for
those templates would live here; none are needed yet.
"""
use Katso.Web, :view
end

3
web/views/page_view.ex Normal file
View file

@ -0,0 +1,3 @@
defmodule Katso.PageView do
@moduledoc """
View module backing the page templates. Helper functions for those
templates would live here; none are needed yet.
"""
use Katso.Web, :view
end

72
web/web.ex Normal file
View file

@ -0,0 +1,72 @@
defmodule Katso.Web do
@moduledoc """
A module that keeps using definitions for controllers,
views and so on.
This can be used in your application as:
use Katso.Web, :controller
use Katso.Web, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below.
"""
@doc """
Quoted definitions injected by `use Katso.Web, :model`.
"""
def model do
quote do
use Ecto.Model
end
end
@doc """
Quoted definitions injected by `use Katso.Web, :controller`:
Phoenix.Controller plus repo, Ecto query helpers and router helpers.
"""
def controller do
quote do
use Phoenix.Controller
# Alias the data repository and import query/model functions
alias Katso.Repo
import Ecto.Model
import Ecto.Query, only: [from: 2]
# Import URL helpers from the router
import Katso.Router.Helpers
end
end
@doc """
Quoted definitions injected by `use Katso.Web, :view`:
templates are looked up under "web/templates".
"""
def view do
quote do
use Phoenix.View, root: "web/templates"
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_flash: 2]
# Import URL helpers from the router
import Katso.Router.Helpers
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
end
end
@doc """
Quoted definitions injected by `use Katso.Web, :channel`.
"""
def channel do
quote do
use Phoenix.Channel
# Alias the data repository and import query/model functions
alias Katso.Repo
import Ecto.Model
import Ecto.Query, only: [from: 2]
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end