From 93d6d36eb697cd9452eb4aab446151a1a33ed245 Mon Sep 17 00:00:00 2001
From: Adam Mathes
Date: Mon, 23 Jan 2017 20:04:03 -0800
Subject: neko v2 initial commit

---
 .gitignore                |   5 +
 LICENSE                   |   7 +
 Makefile                  |  22 ++
 README.md                 |  72 ++++++
 cmd/nekocrawl.go          |  14 ++
 cmd/nekoweb.go            |  14 ++
 config.example            |   7 +
 config/config.go          |  29 +++
 crawler/crawler.go        |  63 +++++
 importer/importer.go      |  80 +++++++
 init.sql                  |  24 ++
 main.go                   |  63 +++++
 models/db.go              |  24 ++
 models/feed/feed.go       | 103 +++++++++
 models/item/item.go       | 118 ++++++++++
 reset.sql                 |   2 +
 reset_db.sh               |   2 +
 static/backbone.min.js    |   4 +
 static/jquery.min.js      |   5 +
 static/jquery.tmpl.min.js |  10 +
 static/login.html         |  29 +++
 static/public.html        |  77 +++++++
 static/style.css          | 244 ++++++++++++++++++++
 static/ui.html            |  87 +++++++
 static/ui.js              | 573 ++++++++++++++++++++++++++++++++++++++++++++++
 static/underscore.min.js  |   1 +
 web/web.go                | 131 +++++++++++
 27 files changed, 1810 insertions(+)
 create mode 100644 .gitignore
 create mode 100644 LICENSE
 create mode 100644 Makefile
 create mode 100644 README.md
 create mode 100644 cmd/nekocrawl.go
 create mode 100644 cmd/nekoweb.go
 create mode 100644 config.example
 create mode 100644 config/config.go
 create mode 100644 crawler/crawler.go
 create mode 100644 importer/importer.go
 create mode 100644 init.sql
 create mode 100644 main.go
 create mode 100644 models/db.go
 create mode 100644 models/feed/feed.go
 create mode 100644 models/item/item.go
 create mode 100644 reset.sql
 create mode 100755 reset_db.sh
 create mode 100644 static/backbone.min.js
 create mode 100644 static/jquery.min.js
 create mode 100644 static/jquery.tmpl.min.js
 create mode 100644 static/login.html
 create mode 100644 static/public.html
 create mode 100644 static/style.css
 create mode 100644 static/ui.html
 create mode 100644 static/ui.js
 create mode 100644 static/underscore.min.js
 create mode 100644 web/web.go

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..fc40e46
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,5 @@
+config.json
+neko
+nekocrawl
+nekoweb
+.DS_Store
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..d2428b1
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,7 @@
+Copyright (c) 2017 Adam Mathes All rights reserved.
+
+Redistribution and use in source and binary forms are permitted provided that the above copyright notice and this paragraph are duplicated in all such forms and that any documentation, advertising materials, and other materials related to such distribution and use acknowledge that the software was developed by Adam Mathes.
+
+The name of "Adam Mathes" and "neko" may not be used to endorse or promote products derived from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED ``AS IS'' AND WITHOUT ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
\ No newline at end of file
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..baef138
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,22 @@
+SOURCES = $(wildcard *.go) $(wildcard */*.go)
+BINARIES = nekoweb nekocrawl
+
+DEPS = github.com/SlyMarbo/rss github.com/abbot/go-http-auth github.com/axgle/mahonia github.com/go-sql-driver/mysql github.com/microcosm-cc/bluemonday
+
+default: $(BINARIES)
+
+
+$(BINARIES): $(SOURCES)
+	go build
+	go build cmd/nekoweb.go
+	go build cmd/nekocrawl.go
+
+
+.PHONY: default deps run
+
+deps:
+	go get $(DEPS)
+
+run:
+	./nekoweb config.json
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..e6c19cb
--- /dev/null
+++ b/README.md
@@ -0,0 +1,72 @@
+# Neko
+
+RSS Reader focused on cat ears mode
+
+*the cat ears are in your mind*
+
+## Huh?
+
+I decided I didn't like the [old version that was python and mongo](https://github.com/adammathes/neko_v1), so I rewrote the backend in Go and SQL, partly as an excuse to learn some Go. Assume the code is bad, since I don't know what I'm doing yet.
+
+The javascript frontend is still the same; I might rewrite that too, since it's old backbone.js code.
+
+Uh, this is not very easy to set up or use yet. Consider it a work in progress.
+
+## Installation
+
+1. [Install golang](https://golang.org)
+
+2. Set up $GOPATH if one doesn't exist already
+
+        $ mkdir $HOME/go
+        $ export GOPATH=$HOME/go
+
+3. Get neko code
+
+        $ go get github.com/adammathes/neko
+
+4. Get dependencies
+
+        $ cd $HOME/go/src/github.com/adammathes/neko
+        $ make deps
+
+    OR
+
+        $ go get [each dependency]
+
+    Great job
+
+5. Build binaries
+
+        $ go build cmd/nekoweb.go
+        $ go build cmd/nekocrawl.go
+
+    This should create "nekoweb" and "nekocrawl" binaries
+
+6. Create MySQL database and user
+
+    ```sh
+    $ mysqladmin -uroot -p create neko
+    $ mysql -uroot -p neko < init.sql
+    $ echo "probably a good idea to make a limited privilege user"
+    $ mysql -uroot -p neko
+    CREATE USER 'neko'@'localhost' IDENTIFIED BY 'yourgreatpasswordhere';
+    GRANT ALL PRIVILEGES ON neko.* TO 'neko'@'localhost';
+    ```
+
+7. Configuration - copy the example config and edit as needed
+
+        $ cp config.example config.json
+
+8. Run web server
+
+        $ ./nekoweb config.json
+
+    Load the URL/port specified in the config and add some feeds.
+
+9. Run crawler
+
+        $ ./nekocrawl config.json
+
+10. Operationalize
+
+    [ add to cron ]
+    [ add daemon for server ]
+    (see the sketches below)
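+
+    As a sketch (untested here; the path and schedule are assumptions to
+    adapt for your machine), a crontab entry for the crawler might look like:
+
+        # crawl feeds every 15 minutes
+        */15 * * * * cd $HOME/go/src/github.com/adammathes/neko && ./nekocrawl config.json
+
+    Any process supervisor (systemd, runit, supervisord, etc.) can keep
+    nekoweb alive; point it at the nekoweb binary with config.json as its
+    argument.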
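+
+11. Combined binary (optional)
+
+    Plain `go build` at the repo root also produces a single `neko` binary
+    whose subcommands mirror the steps above. A usage sketch -- the feed URL
+    and import file name below are placeholders, and note that it currently
+    reads ./config.json from the working directory rather than taking a path:
+
+        $ go build
+        $ ./neko addfeed https://example.com/feed.xml
+        $ ./neko crawl
+        $ ./neko import old_neko_backup.json
+        $ ./neko web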
diff --git a/cmd/nekocrawl.go b/cmd/nekocrawl.go
new file mode 100644
index 0000000..2852e4f
--- /dev/null
+++ b/cmd/nekocrawl.go
@@ -0,0 +1,14 @@
+package main
+
+import (
+	"neko/config"
+	"neko/crawler"
+	"neko/models"
+	"os"
+)
+
+func main() {
+	config.Read(os.Args[1])
+	models.InitDB(config.Config.DBServer)
+	crawler.Crawl()
+}
diff --git a/cmd/nekoweb.go b/cmd/nekoweb.go
new file mode 100644
index 0000000..0904a61
--- /dev/null
+++ b/cmd/nekoweb.go
@@ -0,0 +1,14 @@
+package main
+
+import (
+	"neko/config"
+	"neko/models"
+	"neko/web"
+	"os"
+)
+
+func main() {
+	config.Read(os.Args[1])
+	models.InitDB(config.Config.DBServer)
+	web.Serve()
+}
diff --git a/config.example b/config.example
new file mode 100644
index 0000000..08f4ce4
--- /dev/null
+++ b/config.example
@@ -0,0 +1,7 @@
+{
+    "DBServer": "root:@tcp(127.0.0.1:3306)/neko",
+    "WebServer": "127.0.0.1:4994",
+    "Username": "username",
+    "Realm": "example.com:443",
+    "DigestPassword": "plaintextpasswordforhttpDigestAuth"
+}
diff --git a/config/config.go b/config/config.go
new file mode 100644
index 0000000..59baa2b
--- /dev/null
+++ b/config/config.go
@@ -0,0 +1,29 @@
+package config
+
+import (
+	"encoding/json"
+	"io/ioutil"
+	"log"
+)
+
+type Settings struct {
+	DBServer       string
+	WebServer      string
+	Username       string
+	Realm          string
+	DigestPassword string
+}
+
+var Config Settings
+
+func Read(filename string) {
+	file, e := ioutil.ReadFile(filename)
+	if e != nil {
+		log.Fatal("cannot read config file: ", e)
+	}
+
+	e = json.Unmarshal(file, &Config)
+	if e != nil {
+		log.Fatal("config parse error: ", e)
+	}
+}
diff --git a/crawler/crawler.go b/crawler/crawler.go
new file mode 100644
index 0000000..e3e4aeb
--- /dev/null
+++ b/crawler/crawler.go
@@ -0,0 +1,63 @@
+package crawler
+
+import (
+	"log"
+	"neko/models/feed"
+	"neko/models/item"
+	"net/http"
+	"time"
+
+	"github.com/SlyMarbo/rss"
+)
+
+func Crawl() {
+
+	ch := make(chan string)
+
+	feeds, err := feed.All()
+	if err != nil {
+		log.Fatal(err)
+	}
+	for _, f := range feeds {
+		log.Printf("crawling %s", f.Url)
+		go CrawlFeed(f, ch)
+	}
+
+	// wait for one status message per feed
+	for i := 0; i < len(feeds); i++ {
+		log.Println(<-ch)
+	}
+}
+
+/*
+   TODO: sanitize input on crawl
+*/
+func CrawlFeed(f *feed.Feed, ch chan<- string) {
+	c := &http.Client{
+		// give up after 5 seconds
+		Timeout: 5 * time.Second,
+	}
+
+	rssFeed, err := rss.FetchByClient(f.Url, c)
+	if err != nil {
+		log.Print(err)
+		ch <- "failed to fetch and parse " + f.Url
+		return
+	}
+
+	f.Title = rssFeed.Title
+	f.Update()
+
+	for _, i := range rssFeed.Items {
+		log.Printf("storing item: %s", i.Title)
+		var itm item.Item
+		itm.Title = i.Title
+		itm.Url = i.Link
+		itm.Description = i.Content
+		if itm.Description == "" {
+			itm.Description = i.Summary
+		}
+		itm.FeedId = f.Id
+		itm.Create()
+	}
+	ch <- "successfully crawled " + f.Url
+}
diff --git a/importer/importer.go b/importer/importer.go
new file mode 100644
index 0000000..f746d72
--- /dev/null
+++ b/importer/importer.go
@@ -0,0 +1,80 @@
+package importer
+
+import (
+	"encoding/json"
+	"io"
+	"log"
+	"neko/models/feed"
+	"neko/models/item"
+	"os"
+)
+
+// IItem, IFeed, and IDate mirror the JSON dumped by the old
+// python/mongo version (note the mongo-style "$date" key).
+type IItem struct {
+	Title       string `json:"title"`
+	Url         string `json:"url"`
+	Description string `json:"description"`
+	ReadState   bool   `json:"read"`
+	Starred     bool   `json:"starred"`
+	Date        *IDate `json:"date"`
+	Feed        *IFeed `json:"feed"`
+}
+
+type IFeed struct {
+	Url    string `json:"url"`
+	Title  string `json:"title"`
+	WebUrl string `json:"web_url"`
+}
+
+type IDate struct {
+	Date string `json:"$date"`
+}
+
+func ImportJSON(filename string) {
+
+	f, err := os.Open(filename)
+	if err != nil {
+		log.Fatal(err)
+	}
+	defer f.Close()
+
+	dec := json.NewDecoder(f)
+	for {
+		var ii IItem
+		if err := dec.Decode(&ii); err == io.EOF {
+			break
+		} else if err != nil {
+			// a decode error will not clear itself; bail out
+			log.Println(err)
+			break
+		} else {
+			InsertIItem(&ii)
+		}
+	}
+}
+
+func InsertIItem(ii *IItem) {
+	var f feed.Feed
+
+	if ii.Feed == nil {
+		return
+	}
+	err := f.ByUrl(ii.Feed.Url)
+	if err != nil {
+		f.Url = ii.Feed.Url
+		f.Title = ii.Feed.Title
+		f.Create()
+	}
+
+	var i item.Item
+	i.FeedId = f.Id
+	i.Title = ii.Title
+	i.Url = ii.Url
+	i.Description = ii.Description
+
+	if ii.Date != nil {
+		i.PublishDate = ii.Date.Date
+	}
+
+	err = i.Create()
+	if err != nil {
+		log.Println(err)
+		return
+	}
+	log.Printf("inserted %s\n", i.Url)
+}
diff --git a/init.sql b/init.sql
new file mode 100644
index 0000000..7b01fda
--- /dev/null
+++ b/init.sql
@@ -0,0 +1,24 @@
+CREATE TABLE feed (
+    id INT NOT NULL AUTO_INCREMENT,
+    url VARCHAR(100) NOT NULL,
+    web_url VARCHAR(100) NOT NULL DEFAULT "",
+    title VARCHAR(100) NOT NULL DEFAULT "",
+    last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
+    UNIQUE KEY (url),
+    PRIMARY KEY (id)
+);
+
+CREATE TABLE item (
+    id INT NOT NULL AUTO_INCREMENT,
+    feed_id INT NOT NULL,
+    title TEXT,
+    url VARCHAR(100) NOT NULL,
+    description TEXT,
+    publish_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP NOT NULL,
+    read_state BOOLEAN DEFAULT FALSE NOT NULL,
+    FOREIGN KEY (feed_id) REFERENCES feed(id) ON DELETE CASCADE,
+    UNIQUE KEY (url),
+    INDEX (publish_date),
+    PRIMARY KEY (id)
+);
+
diff --git a/main.go b/main.go
new file mode 100644
index 0000000..255b194
--- /dev/null
+++ b/main.go
@@ -0,0 +1,63 @@
+package main
+
+import (
+	"fmt"
+	"log"
+	"os"
+
+	"neko/config"
+	"neko/crawler"
+	"neko/importer"
+	"neko/models"
+	"neko/models/feed"
+	"neko/web"
+)
+
+func main() {
+
+	// FIX: config path is hardcoded; should be an argument or flag
+	config.Read("./config.json")
+
+	models.InitDB(config.Config.DBServer)
+	if len(os.Args) < 2 {
+		fmt.Printf("usage: neko [web|addfeed|crawl|import]\n")
+		fmt.Printf("addfeed <url> -- add a new feed\n")
+		return
+	}
+	cmd := os.Args[1]
+	switch cmd {
+	case "web":
+		log.Printf("starting web server at %s", config.Config.WebServer)
+		web.Serve()
+	case "addfeed":
+		addFeed()
+	case "crawl":
+		crawl()
+	case "import":
+		importLegacy()
+	default:
+		panic("not a valid command")
+	}
+}
+
+func addFeed() {
+	if len(os.Args) < 3 {
+		log.Fatal("need a valid url")
+	}
+	url := os.Args[2]
+	if err := feed.NewFeed(url); err != nil {
+		log.Fatal(err)
+	}
+}
+
+func importLegacy() {
+	if len(os.Args) < 3 {
+		log.Fatal("need a json file to import")
+	}
+	json_file := os.Args[2]
+	log.Printf("importing json file from: %s", json_file)
+	importer.ImportJSON(json_file)
+}
+
+func crawl() {
+	crawler.Crawl()
+}
diff --git a/models/db.go b/models/db.go
new file mode 100644
index 0000000..b8bc655
--- /dev/null
+++ b/models/db.go
@@ -0,0 +1,24 @@
+/*
+Package models implements the entities for the subscription service and
+handles persistence via a mysql/mariadb database.
+*/
+package models
+
+import (
+	"database/sql"
+	"log"
+
+	_ "github.com/go-sql-driver/mysql"
+)
+
+var DB *sql.DB
+
+func InitDB(dataSourceName string) {
+	var err error
+	DB, err = sql.Open("mysql", dataSourceName)
+	if err != nil {
+		log.Panic(err)
+	}
+
+	if err = DB.Ping(); err != nil {
+		log.Panic(err)
+	}
+}
diff --git a/models/feed/feed.go b/models/feed/feed.go
new file mode 100644
index 0000000..4abebb4
--- /dev/null
+++ b/models/feed/feed.go
@@ -0,0 +1,103 @@
+package feed
+
+import (
+	"log"
+	"neko/models"
+)
+
+type Feed struct {
+	Id    int64  `json:"_id"`
+	Url   string `json:"url"`
+	Title string `json:"title"`
+	// TODO: last_updated scan
+}
+
+func NewFeed(url string) error {
+	_, err := models.DB.Exec("INSERT INTO feed(url) VALUES(?)", url)
+	return err
+}
+
+func All() ([]*Feed, error) {
+	return filter("")
+}
+
+func filter(where string) ([]*Feed, error) {
+	// todo: add back in title
+	rows, err := models.DB.Query(`SELECT id, url, title
+                                  FROM feed ` + where)
+	if err != nil {
+		return nil, err
+	}
+	defer rows.Close()
+
+	feeds := make([]*Feed, 0)
+	for rows.Next() {
+		f := new(Feed)
+		err := rows.Scan(&f.Id, &f.Url, &f.Title)
+		if err != nil {
+			return nil, err
+		}
+		feeds = append(feeds, f)
+	}
+	if err = rows.Err(); err != nil {
+		return nil, err
+	}
+	return feeds, nil
+}
+
+func (f *Feed) Update() {
+	if len(f.Title) == 0 {
+		return
+	}
+
+	if f.Id == 0 {
+		return
+	}
+
+	if len(f.Url) == 0 {
+		return
+	}
+
+	// Exec, not Query -- an UPDATE returns no rows
+	_, err := models.DB.Exec(`UPDATE feed
+                              SET title=?, url=?
+                              WHERE id=?`, f.Title, f.Url, f.Id)
+	if err != nil {
+		log.Println(err)
+	}
+}
+
+func (f *Feed) Delete() {
+	log.Printf("deleting feed %d", f.Id)
+	_, err := models.DB.Exec(`DELETE FROM feed
+                              WHERE id=?`, f.Id)
+	if err != nil {
+		log.Println(err)
+	}
+}
+
+func (f *Feed) ByUrl(url string) error {
+	err := models.DB.QueryRow(`SELECT id, url, title
+                               FROM feed
+                               WHERE url = ?`, url).Scan(&f.Id, &f.Url, &f.Title)
+	if err != nil {
+		return err
+	}
+	return nil
+}
+
+func (f *Feed) Create() error {
+	res, err := models.DB.Exec(`INSERT INTO feed(url, title)
+                                VALUES(?, ?)`, f.Url, f.Title)
+	if err != nil {
+		return err
+	}
+
+	id, _ := res.LastInsertId()
+	f.Id = id
+
+	return nil
+}
diff --git a/models/item/item.go b/models/item/item.go
new file mode 100644
index 0000000..51166c4
--- /dev/null
+++ b/models/item/item.go
@@ -0,0 +1,118 @@
+package item
+
+import (
+	"fmt"
+	"log"
+	"neko/models"
+
+	"github.com/microcosm-cc/bluemonday"
+)
+
+type Item struct {
+	Id          int64  `json:"_id,string,omitempty"`
+	Title       string `json:"title"`
+	Url         string `json:"url"`
+	Description string `json:"description"`
+	ReadState   bool   `json:"read"`
+	FeedId      int64
+	PublishDate string `json:"publish_date"`
+	FeedTitle   string `json:"feed_title"`
+	FeedUrl     string `json:"feed_url"`
+}
+
+func (i *Item) Print() {
+	fmt.Printf("id: %d\n", i.Id)
+	fmt.Printf("title: %s\n", i.Title)
+	fmt.Printf("ReadState: %v\n", i.ReadState)
+}
+
+func (i *Item) Create() error {
+	res, err := models.DB.Exec(`INSERT INTO
+                                item(title, url, description, feed_id)
+                                VALUES(?, ?, ?, ?)`, i.Title, i.Url, i.Description, i.FeedId)
+	if err != nil {
+		return err
+	}
+
+	id, _ := res.LastInsertId()
+	i.Id = id
+
+	return nil
+}
+
+func (i *Item) Save() {
+	_, err := models.DB.Exec(`UPDATE item
+                              SET read_state=?
+                              WHERE id=?`, i.ReadState, i.Id)
+	if err != nil {
+		log.Println(err)
+	}
+}
+
+func (i *Item) FullSave() {
+	_, err := models.DB.Exec(`UPDATE item
+                              SET title=?, url=?, description=?, feed_id=?
+                              WHERE id=?`, i.Title, i.Url, i.Description, i.FeedId, i.Id)
+	if err != nil {
+		log.Println(err)
+	}
+}
+
+func Filter(max_id int64, feed_id int64, unread_only bool) ([]*Item, error) {
+
+	var args []interface{}
+
+	query := `SELECT item.id, item.title, item.url, item.description,
+                     item.read_state, item.publish_date,
+                     feed.url, feed.title
+              FROM item,feed
+              WHERE item.feed_id=feed.id `
+
+	if max_id != 0 {
+		query = query + "AND item.id < ? 
" + args = append(args, max_id) + } + + if feed_id != 0 { + query = query + " AND feed.id=? " + args = append(args, feed_id) + } + + if unread_only { + query = query + " AND item.read_state=0 " + } + + query = query + "ORDER BY item.id DESC LIMIT 15" + // log.Println(query) + // log.Println(args...) + + rows, err := models.DB.Query(query, args...) + if err != nil { + log.Println(err) + return nil, err + } + defer rows.Close() + + p := bluemonday.NewPolicy() + p.AllowElements("blockquote", "a", "img", "p", "h1", "h2", "h3", "h4", "b", "i", "em", "strong") + p.AllowAttrs("href").OnElements("a") + p.AllowAttrs("src", "alt").OnElements("img") + + + items := make([]*Item, 0) + for rows.Next() { + i := new(Item) + err := rows.Scan(&i.Id, &i.Title, &i.Url, &i.Description, &i.ReadState, &i.PublishDate, &i.FeedUrl, &i.FeedTitle) + if err != nil { + log.Println(err) + return nil, err + } + i.Description = p.Sanitize(i.Description) + // TODO: sanitize other fields + items = append(items, i) + } + if err = rows.Err(); err != nil { + return nil, err + } + return items, nil +} diff --git a/reset.sql b/reset.sql new file mode 100644 index 0000000..6bf30e1 --- /dev/null +++ b/reset.sql @@ -0,0 +1,2 @@ +DROP TABLE feed; +DROP TABLE item; diff --git a/reset_db.sh b/reset_db.sh new file mode 100755 index 0000000..9d4f0a3 --- /dev/null +++ b/reset_db.sh @@ -0,0 +1,2 @@ +mysql -uroot neko < reset.sql +mysql -uroot neko < init.sql diff --git a/static/backbone.min.js b/static/backbone.min.js new file mode 100644 index 0000000..3541019 --- /dev/null +++ b/static/backbone.min.js @@ -0,0 +1,4 @@ +(function(){var t=this;var e=t.Backbone;var i=[];var r=i.push;var s=i.slice;var n=i.splice;var a;if(typeof exports!=="undefined"){a=exports}else{a=t.Backbone={}}a.VERSION="1.0.0";var h=t._;if(!h&&typeof require!=="undefined")h=require("underscore");a.$=t.jQuery||t.Zepto||t.ender||t.$;a.noConflict=function(){t.Backbone=e;return this};a.emulateHTTP=false;a.emulateJSON=false;var o=a.Events={on:function(t,e,i){if(!l(this,"on",t,[e,i])||!e)return this;this._events||(this._events={});var r=this._events[t]||(this._events[t]=[]);r.push({callback:e,context:i,ctx:i||this});return this},once:function(t,e,i){if(!l(this,"once",t,[e,i])||!e)return this;var r=this;var s=h.once(function(){r.off(t,s);e.apply(this,arguments)});s._callback=e;return this.on(t,s,i)},off:function(t,e,i){var r,s,n,a,o,u,c,f;if(!this._events||!l(this,"off",t,[e,i]))return this;if(!t&&!e&&!i){this._events={};return this}a=t?[t]:h.keys(this._events);for(o=0,u=a.length;o").attr(t);this.setElement(e,false)}else{this.setElement(h.result(this,"el"),false)}}});a.sync=function(t,e,i){var r=k[t];h.defaults(i||(i={}),{emulateHTTP:a.emulateHTTP,emulateJSON:a.emulateJSON});var s={type:r,dataType:"json"};if(!i.url){s.url=h.result(e,"url")||U()}if(i.data==null&&e&&(t==="create"||t==="update"||t==="patch")){s.contentType="application/json";s.data=JSON.stringify(i.attrs||e.toJSON(i))}if(i.emulateJSON){s.contentType="application/x-www-form-urlencoded";s.data=s.data?{model:s.data}:{}}if(i.emulateHTTP&&(r==="PUT"||r==="DELETE"||r==="PATCH")){s.type="POST";if(i.emulateJSON)s.data._method=r;var n=i.beforeSend;i.beforeSend=function(t){t.setRequestHeader("X-HTTP-Method-Override",r);if(n)return n.apply(this,arguments)}}if(s.type!=="GET"&&!i.emulateJSON){s.processData=false}if(s.type==="PATCH"&&window.ActiveXObject&&!(window.external&&window.external.msActiveXFilteringEnabled)){s.xhr=function(){return new ActiveXObject("Microsoft.XMLHTTP")}}var 
o=i.xhr=a.ajax(h.extend(s,i));e.trigger("request",e,o,i);return o};var k={create:"POST",update:"PUT",patch:"PATCH","delete":"DELETE",read:"GET"};a.ajax=function(){return a.$.ajax.apply(a.$,arguments)};var S=a.Router=function(t){t||(t={});if(t.routes)this.routes=t.routes;this._bindRoutes();this.initialize.apply(this,arguments)};var $=/\((.*?)\)/g;var T=/(\(\?)?:\w+/g;var H=/\*\w+/g;var A=/[\-{}\[\]+?.,\\\^$|#\s]/g;h.extend(S.prototype,o,{initialize:function(){},route:function(t,e,i){if(!h.isRegExp(t))t=this._routeToRegExp(t);if(h.isFunction(e)){i=e;e=""}if(!i)i=this[e];var r=this;a.history.route(t,function(s){var n=r._extractParameters(t,s);i&&i.apply(r,n);r.trigger.apply(r,["route:"+e].concat(n));r.trigger("route",e,n);a.history.trigger("route",r,e,n)});return this},navigate:function(t,e){a.history.navigate(t,e);return this},_bindRoutes:function(){if(!this.routes)return;this.routes=h.result(this,"routes");var t,e=h.keys(this.routes);while((t=e.pop())!=null){this.route(t,this.routes[t])}},_routeToRegExp:function(t){t=t.replace(A,"\\$&").replace($,"(?:$1)?").replace(T,function(t,e){return e?t:"([^/]+)"}).replace(H,"(.*?)");return new RegExp("^"+t+"$")},_extractParameters:function(t,e){var i=t.exec(e).slice(1);return h.map(i,function(t){return t?decodeURIComponent(t):null})}});var I=a.History=function(){this.handlers=[];h.bindAll(this,"checkUrl");if(typeof window!=="undefined"){this.location=window.location;this.history=window.history}};var N=/^[#\/]|\s+$/g;var P=/^\/+|\/+$/g;var O=/msie [\w.]+/;var C=/\/$/;I.started=false;h.extend(I.prototype,o,{interval:50,getHash:function(t){var e=(t||this).location.href.match(/#(.*)$/);return e?e[1]:""},getFragment:function(t,e){if(t==null){if(this._hasPushState||!this._wantsHashChange||e){t=this.location.pathname;var i=this.root.replace(C,"");if(!t.indexOf(i))t=t.substr(i.length)}else{t=this.getHash()}}return t.replace(N,"")},start:function(t){if(I.started)throw new Error("Backbone.history has already been started");I.started=true;this.options=h.extend({},{root:"/"},this.options,t);this.root=this.options.root;this._wantsHashChange=this.options.hashChange!==false;this._wantsPushState=!!this.options.pushState;this._hasPushState=!!(this.options.pushState&&this.history&&this.history.pushState);var e=this.getFragment();var i=document.documentMode;var r=O.exec(navigator.userAgent.toLowerCase())&&(!i||i<=7);this.root=("/"+this.root+"/").replace(P,"/");if(r&&this._wantsHashChange){this.iframe=a.$('