From 58bcc8443017aea9f795f57d533a1576e6e4c1e9 Mon Sep 17 00:00:00 2001 From: Gabor Adam Toth Date: Sat, 27 Feb 2010 23:58:32 +0100 Subject: [PATCH] place/threads: use _log to store data, threaded comments place/archetype.gen: new save log immediately flag to save() after logAppend(): - in place/name.c: define SAVE_LOG_IMMEDIATELY - in local.h: define _flag_save_place_log_immediately define _flag_save_userthreads_immediately place/text: added x() --- world/net/identica/client.c | 12 +- world/net/include/lastlog.h | 15 + world/net/lastlog.c | 43 +- world/net/library/htbasics.c | 3 +- world/net/library/signature.c | 2 +- world/net/library/text.c | 7 + world/net/person.c | 2 +- world/net/place/archetype.gen | 45 +- world/net/place/threads.c | 796 +++++++++++++++++----------------- world/net/place/userthreads.c | 16 +- world/net/twitter/client.c | 2 +- world/static/examine.css | 8 +- 12 files changed, 515 insertions(+), 436 deletions(-) create mode 100644 world/net/include/lastlog.h diff --git a/world/net/identica/client.c b/world/net/identica/client.c index c3ddd98..11a118d 100644 --- a/world/net/identica/client.c +++ b/world/net/identica/client.c @@ -1,7 +1,7 @@ /* identi.ca client, uses the twitter api * http://status.net/wiki/Twitter-compatible_API * - * - register app @ http://identi.ca/settings/oauthapps + * - register app @ https://identi.ca/settings/oauthapps * - then in local.h #define IDENTICA_KEY & IDENTICA_SECRET */ @@ -14,11 +14,11 @@ object load(object usr, string key, string secret, string request, string access display_name = "identi.ca"; api_base_url = "http://identi.ca/api"; - unless (consumer_key) consumer_key = IDENTICA_KEY; - unless (consumer_secret) consumer_secret = IDENTICA_SECRET; - unless (request_token_url) request_token_url = api_base_url + "/oauth/request_token"; - unless (access_token_url) access_token_url = api_base_url + "/oauth/access_token"; - unless (authorize_url) authorize_url = api_base_url + "/oauth/authorize"; + consumer_key = IDENTICA_KEY; + consumer_secret = IDENTICA_SECRET; + request_token_url = api_base_url + "/oauth/request_token"; + access_token_url = api_base_url + "/oauth/access_token"; + authorize_url = api_base_url + "/oauth/authorize"; return ::load(usr, key, secret, request, access, authorize); } diff --git a/world/net/include/lastlog.h b/world/net/include/lastlog.h new file mode 100644 index 0000000..a460efc --- /dev/null +++ b/world/net/include/lastlog.h @@ -0,0 +1,15 @@ +#ifndef LASTLOG_H +#define LASTLOG_H + +// _log fields +#define LOG_SOURCE 0 +#define LOG_SOURCE_OBJ 0 +#define LOG_SOURCE_UNI 1 +#define LOG_MC 1 +#define LOG_DATA 2 +#define LOG_VARS 3 +#define LOG_CHILDREN 4 //only added by place/threads:entries() + +#define LOG_WIDTH 4 + +#endif diff --git a/world/net/lastlog.c b/world/net/lastlog.c index 96d1913..bcd81a5 100644 --- a/world/net/lastlog.c +++ b/world/net/lastlog.c @@ -13,6 +13,7 @@ #endif #include +#include protected array(mixed) _log; @@ -66,6 +67,7 @@ logInit(takeThis) { } logClip(maxlen, cutlen) { + P3(("logClip(%O, %O)\n", maxlen, cutlen)) int howmany; howmany = sizeof(_log); @@ -109,6 +111,7 @@ logView(a, showingLog, defAmount) { mapping m; ll = 0; for(i=0; i= 0) || ((t = m["_nick"]) && strstr(t, grep) >= 0) @@ -145,6 +148,7 @@ logView(a, showingLog, defAmount) { i = sizeof(_log) - ll; } while (i < sizeof(_log)) { + unless (_log[i]) { i+= 4; continue; } #ifndef UNSAFE_LASTLOG msgView((pointerp(_log[i]) ? 
_log[i++][0] || _log[i-1][1] @@ -159,14 +163,16 @@ logView(a, showingLog, defAmount) { return ll / 4; } -// pick a single message. used by POP3 -logPick(i) { +int logExists(int i) { + i *= 4; + if (i < 0 || i >= sizeof(_log) || !_log[i]) return 0; + return 1; +} + +// pick a single message. used by POP3 & place/threads +array(mixed) logPick(int i) { + unless (logExists(i)) return 0; i *= 4; - if (i < 0) { - i = sizeof(_log) + i; - if (i < 0) return 0; - } - if (i > sizeof(_log)) return 0; #ifndef UNSAFE_LASTLOG return ({ (pointerp(_log[i]) ? _log[i++][0] || _log[i-1][1] @@ -177,8 +183,27 @@ logPick(i) { #endif /* UNSAFE_LASTLOG */ } +varargs public int logSize(string mc) { + unless (mc) return sizeof(_log) / 4; + + int i, n = 0; + for (i = 0; i < sizeof(_log); i += 4) + if (_log[i] && abbrev(mc, _log[i])) n++; + + return n; +} + +int logSet(int i, array(mixed) item) { + if (i < 0 || i > logSize()) return 0; + if (i == logSize()) { + _log += item; + } else { + i *= 4; + _log[i..i+3] = item[0..3]; + } + return 1; +} + // used to make a temporary copy of the log, in POP3 public logQuery() { return _log; } -public logSize() { return sizeof(_log) / 4; } - diff --git a/world/net/library/htbasics.c b/world/net/library/htbasics.c index de76421..7d92f33 100644 --- a/world/net/library/htbasics.c +++ b/world/net/library/htbasics.c @@ -6,12 +6,13 @@ // to make sure they won't trigger // html commands // -string htquote(string s) { +varargs string htquote(string s, int newlines) { ASSERT("htquote", stringp(s), s) s = replace(s, "&", "&"); // s = replace(s, "\"", """); //" s = replace(s, "<", "<"); s = replace(s, ">", ">"); + if (newlines) s = replace(s, "\n", "
\n"); return s; } diff --git a/world/net/library/signature.c b/world/net/library/signature.c index b03a34f..7e9a709 100644 --- a/world/net/library/signature.c +++ b/world/net/library/signature.c @@ -80,7 +80,7 @@ private volatile mapping _sigs = ([ "_request_ent": ({ "_request_entry", 0, "_id" }), "_request_comment": ({ "_request_comment", 0, "_id", "_text" }), "_request_com": ({ "_request_comment", 0, "_id", "_text" }), - "_request_thread": ({ "_request_thread", 0, "_id", "_title" }), + "_request_title": ({ "_request_title", 0, "_id", "_title" }), "_request_addentry": ({ "_request_addentry", 0, "_text" }), "_request_addent": ({ "_request_addentry", 0, "_text" }), "_request_submit": ({ "_request_addentry", 0, "_text" }), diff --git a/world/net/library/text.c b/world/net/library/text.c index e32a67f..25e8b04 100644 --- a/world/net/library/text.c +++ b/world/net/library/text.c @@ -172,3 +172,10 @@ varargs void w(string mc, string data, mixed vars) { } #endif +// a simple implementation of perl's x operator +string x(string str, int n) { + int i; + string res = ""; + for (i = 0; i < n; i++) res += str; + return res; +} diff --git a/world/net/person.c b/world/net/person.c index 081e660..d37417d 100644 --- a/world/net/person.c +++ b/world/net/person.c @@ -473,7 +473,7 @@ qDescription(source, vars, profile, itsme) { foreach (string c : v("channels")) { object p = find_place(c); unless (objectp(p) && (p->isPublic() || (source && p->qMember(source))) /*&& p->numEntries() > 0*/) continue; - channels += ([ p->qChannel(): p->entries(10)]); + channels += ([ p->qChannel(): p->entries(10, 0, 1)]); } // don't make_json for anonymous queries which are handled locally dv["_channels"] = source ? make_json(channels) : channels; diff --git a/world/net/place/archetype.gen b/world/net/place/archetype.gen index c39758a..339b1d4 100644 --- a/world/net/place/archetype.gen +++ b/world/net/place/archetype.gen @@ -208,6 +208,22 @@ private volatile string _logfile; qLogging() { return v("logging"); } #endif +int qSaveImmediately() { +#if defined(SAVE_LOG_IMMEDIATELY) || defined(_flag_save_place_log_immediately) + return 1; +#else + return 0; +#endif +} + +int qHistoryPersistentLimit() { + return _limit_amount_history_persistent; +} + +int qHistoryExportLimit() { + return _limit_amount_history_export; +} + // to be overloaded by place.gen qNewsfeed() { return 0; } // _request_list_feature uses this in *all* place objects, dont ifdef @@ -561,8 +577,7 @@ htget(prot, query, headers, qs, data, noprocess) { unless (noprocess) { if (query["amount"]) { sscanf(query["amount"], "%d", a); - a = a < _limit_amount_history_export ? a : - _limit_amount_history_export; + a = a < qHistoryExportLimit() ? a : qHistoryExportLimit(); P4(("%O amount is %O\n", ME, a)) } switch(query["format"]) { @@ -741,7 +756,7 @@ insert_member(source, mc, data, vars, ni, neu, again) { // NEW: if OWNERS have not been provided by place.gen // we'll make the first guy who walks in our owner. unless (v("owners")) { - vSet("owners", ([ SNICKER: source ])); + vSet("owners", ([ lower_case(SNICKER): source ])); // don't send _warning_place_duty_owner // before acquitting enter operation.. vars["_duty"] = "owner"; // _owner_new ? 
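For orientation, a minimal sketch (not part of this patch) of how a place that inherits lastlog.c reads one record through the new lastlog.h offsets and the logExists()/logPick()/logSet() accessors added above. The function name and the choice of the last record are illustrative only:

    void show_last_entry() {
        int last = logSize() - 1;           // logSize() now returns the record count
        unless (logExists(last)) return;    // a slot may have been zeroed out later
        array(mixed) e = logPick(last);     // roughly ({ source, mc, data, vars })
        string mc = e[LOG_MC];              // e.g. "_notice_thread_entry"
        mapping vars = e[LOG_VARS];         // psyc vars like vars["_id"], vars["_text"]
        // a record can also be overwritten in place, keeping the 4-slot width:
        //     logSet(last, ({ 0, 0, 0, 0 }));
    }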
@@ -1515,6 +1530,7 @@ castmsg(source, mc, data, vars) { # endif logAppend(source, mc, data, vars, 0, "_time_place"); _histChange++; + if (qSaveImmediately()) save(); // cannot just call ::castmsg after logAppend because // logAppend adds the _time_place var so i have to // patch around one way or the other @@ -1624,13 +1640,13 @@ void create() { #ifdef PLACE_HISTORY void reset(int again) { // ::reset(again); - if (_histChange) { - logClip(2 * _limit_amount_history_persistent, - _limit_amount_history_persistent); - save(); - P2(("RESET: %O stores its history (+%O)\n", - ME, _histChange)) - } + if (_histChange) { + if (qHistoryPersistentLimit()) + logClip(2 * qHistoryPersistentLimit(), qHistoryPersistentLimit()); + save(); + P2(("RESET: %O stores its history (+%O)\n", ME, _histChange)) + } + _histChange = 0; #if 0 //ndef NOCLEANUP // keep the server clean. unused places may exit. @@ -2310,7 +2326,7 @@ _request_set_style(source, mc, data, vars, b) { string value = vars["_uniform_style"] || vars["_value"]; if (value && (value = legal_url(value, "http"))) vSet("_uniform_style", value); - else { + else if (value) { sendmsg(source, "_error_illegal_scheme", "That is not a valid [_scheme] URL for a file.", @@ -2583,6 +2599,10 @@ sAide(whom) { int ret; mapping aides = v("aides") || ([]); + // change local uniform to nick + array(mixed) u = parse_uniform(whom); + if (u && is_localhost(lower_case(u[UHost]))) whom = u[UResource]; + t = lower_case(whom); if (aides[t]) { aides -= ([ t ]); @@ -2612,7 +2632,8 @@ listAides(source) { qAide(snicker, aidesonly) { // never call with objectp.. use SNICKER // if (objectp(whom)) whom = whom->qName(); - snicker = lower_case(snicker); // should we enforce SNICKER to be lc? + + snicker = lower_case(snicker); // should we enforce SNICKER to be lc? yes! if (!aidesonly && sizeof(v("owners")) && member(v("owners"), snicker)) return 4; unless (mappingp(v("aides"))) return 0; return v("aides")[snicker]; diff --git a/world/net/place/threads.c b/world/net/place/threads.c index 5f62e15..c48e9d6 100644 --- a/world/net/place/threads.c +++ b/world/net/place/threads.c @@ -3,8 +3,10 @@ #include #include #include +#include -inherit NET_PATH "place/owned"; +#define PLACE_HISTORY +#define _limit_amount_history_persistent 0 #ifndef DEFAULT_BACKLOG # define DEFAULT_BACKLOG 10 @@ -14,60 +16,159 @@ inherit NET_PATH "place/owned"; # define STYLESHEET (v("_uniform_style") || "/static/examine.css") #endif -// datenstruktur für threads? -// -// bestehende struktur ist: großes array von entries. -// -// wie wärs mit mapping mit key=threadname und value=array-of-entries -// subjects werden abgeschafft: sie sind der name des threads -// wer einen thread in seinem reply umnennen will legt in wirklichkeit -// einen neuen thread an, meinetwegen mit "was: old thread" -// -// der nachteil an solch einer struktur wäre, dass man neue comments -// in alten threads nicht so schnell findet - man ist auf die notification -// angewiesen, was andererseits die stärke von psycblogs ist. -// man könnte die notifications zudem noch in die history einspeisen.. -// -// nachteile an der bestehenden struktur ist: 1. threadname in jeder -// entry, 2. threads nur mittels durchlauf des ganzen blogs darstellbar -// -// momentmal.. das was du "comments" nennst sind doch schon die threads! 
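The qSaveImmediately() hook introduced above can be switched on for a single place as well as site-wide; the commit message mentions defining SAVE_LOG_IMMEDIATELY in a place's own .c file. A hedged sketch of that per-place opt-in (the place name is hypothetical, only the define matters):

    // place/myplace.c -- hypothetical example place
    #define SAVE_LOG_IMMEDIATELY    // qSaveImmediately() returns 1, so castmsg()
                                    // calls save() right after logAppend() instead of
                                    // waiting for reset() to clip and store the history
    // ... rest of the place definition stays unchanged ...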
+inherit NET_PATH "place/owned"; -protected mapping* _thread; +qHistoryPersistentLimit() { + return 0; +} -volatile int last_modified; -volatile string webact; +canPost(snicker) { + return qAide(snicker); +} + +canDeleteOwn(snicker) { + return qAide(snicker); +} + +canDeleteEverything(snicker) { + return qOwner(snicker); +} + +int mayLog(string mc) { + return abbrev("_notice_thread", mc) || abbrev("_message", mc); +} + +int showWebLog() { + return 1; +} + +int numEntries() { + return logSize("_notice_thread"); +} create() { P3((">> threads:create()\n")) ::create(); - unless (pointerp(_thread)) _thread = ({ }); + + //index entries from 1 + logSet(0, ({0, 0, 0, 0})); +} + +varargs array(mixed) entries(int limit, int offset, int reverse, int parent, int id) { + P3((">> entries(%O, %O, %O)\n", limit, offset, parent)) + array(mixed) entries = ({}), entry, children, child; + mapping vars; + int i, n = 0, o = 0; + int from = id || logSize() - 1; + int to = id || parent || 0; + for (i = from; i >= to; i--) { + unless (logPick(i)) continue; + entry = logPick(i); + unless (abbrev("_notice_thread", entry[LOG_MC])) continue; + PT((">>> entry %O: %O\n", i, entry)) + vars = entry[LOG_VARS]; + if (vars["_parent"] != parent) continue; + if (o++ < offset) continue; + children = ({}); + if (member(vars, "_children")) { + foreach (int c : vars["_children"]) { + if (child = logPick(c)) { + children += ({ child + ({ entries(0, 0, reverse, c) }) }); + } + } + } + PT((">>> adding %O: %O\n", i, entry)) + if (reverse) { + entries += ({ entry + ({ children }) }); + } else { + entries = ({ entry + ({ children }) }) + entries; + } + if (limit && ++n >= limit) break; + } + PT((">>> entries: %O\n", entries)) + return entries; +} + +varargs array(mixed) entry(int id) { + return entries(0, 0, 0, 0, id); +} + +varargs int addEntry(mixed source, string snicker, string text, string title, int parent_id) { + P3((">> addEntry(%O, %O, %O, %O, %O)\n", source, snicker, text, title, parent_id)) + int id = logSize(); + string mc = "_notice_thread_entry"; + string data = "[_nick] [_action]: "; + + mapping vars = ([ + "_id": id, + "_text": text, + "_nick": snicker, + "_action": "adds", //TODO: add a /set'ting for it, or find a better name + ]); + + if (parent_id) { + P3((">>> parent_id: %O\n", parent_id)) + array(mixed) parent; + unless (parent = logPick(parent_id)) return 0; + P3((">>> parent: %O\n", parent)) + unless (parent[LOG_VARS]["_children"]) parent[LOG_VARS]["_children"] = ({ }); + parent[LOG_VARS]["_children"] += ({ id }); + save(); + + mc += "_reply"; + data = member(parent[LOG_VARS], "_title") ? 
+ "[_nick] [_action] in reply to #[_parent] ([_parent_title]): " : + "[_nick] [_action] in reply to #[_parent]: ", + vars += ([ "_parent": parent_id ]); + } + + if (title && strlen(title)) { + vars += ([ "_title": title ]); + data += "[_title]\n[_text]"; + } else { + data += "[_text]"; + } + + data += " (#[_id] in [_nick_place])"; + + castmsg(source, mc, data, vars); + return 1; +} + +int delEntry(int id, mixed source, mapping vars) { + array(mixed) entry; + unless (entry = logPick(id)) return 0; + + string unick; + unless (canDeleteEverything(SNICKER)) + unless (canDeleteOwn(SNICKER) && lower_case(psyc_name(source)) == lower_case(entry[LOG_SOURCE][LOG_SOURCE_UNI])) + return 0; + + logSet(id, ({0,0,0,0})); + save(); + return 1; +} + +sendEntries(mixed source, array(mixed) entries, int level) { + P3((">> sendEntries(%O, %O)\n", source, entries)) + mapping vars; + int n = 0; + unless(source && entries) return n; + foreach(array(mixed) entry : entries) { + PT(("entry: %O\n", entry)) + vars = entry[LOG_VARS]; + sendmsg(source, regreplace(entry[LOG_MC], "^_notice", "_list", 1), + "[_indent][_nick]: " + (vars["_title"] ? "[_title]\n" : "") + "[_text] (#[_id])", + vars + ([ "_level": level, "_indent": x(" ", level) ])); + if (sizeof(entry) >= LOG_CHILDREN + 1) sendEntries(source, entry[LOG_CHILDREN], level + 1); + n++; + } + return n; } _request_entries(source, mc, data, vars, b) { int num = to_int(vars["_num"]) || DEFAULT_BACKLOG; - array(mapping) entries = ({ }); - mapping entry; - - for (int i = sizeof(_thread) - 1; i >= 0; i--) { - unless (entry = _thread[i]) continue; - entries = - ({ ([ - "_sep" : strlen(entry["thread"]) ? " - " : "", - "_thread" : entry["thread"], - "_text" : entry["text"], - "_author" : entry["author"], - "_date" : entry["date"], - "_comments": sizeof(entry["comments"]), - "_id" : i, - "_nick_place" : MYNICK, - ]) }) + entries; - if (sizeof(entries) == num) break; - } - foreach(entry : entries) - sendmsg(source, "_list_thread_entry", - "#[_id] - [_author][_sep][_thread]: [_text] ([_comments])", - entry); + sendEntries(source, entries(num)); return 1; } @@ -78,60 +179,29 @@ _request_entry(source, mc, data, vars, b) { return 1; } - mapping entry; int id = to_int(vars["_id"]); - - if (id >= 0 && id < sizeof(_thread)) - entry = _thread[id]; - - unless (entry) { + unless(sendEntries(source, entry(id))) { sendmsg(source, "_error_thread_invalid_entry", "#[_id]: no such entry", (["_id": id])); - return 1; } - sendmsg(source, "_list_thread_entry", - "#[_id] [_author][_sep][_thread]: [_text] ([_comments])", - ([ - "_sep" : strlen(entry["thread"]) ? 
" - " : "", - "_thread" : entry["thread"], - "_text" : entry["text"], - "_author" : entry["author"], - "_date" : entry["date"], - "_comments": sizeof(entry["comments"]), - "_id" : id, - "_nick_place" : MYNICK ]) ); - - if (entry["comments"]) { - foreach(mapping item : entry["comments"]) { - sendmsg(source, "_list_thread_comment", - "> [_nick]: [_text]", - ([ - "_nick" : item["nick"], - "_text" : item["text"], - "_date": item["date"], - "_nick_place" : MYNICK ]) ); - } - } return 1; } -_request_thread(source, mc, data, vars, b) { - unless (vars["_id"] && strlen(vars["_id"])) { - sendmsg(source, "_warning_usage_thread", - "Usage: /thread ", ([ ])); +_request_addentry(source, mc, data, vars, b) { + P3((">> _request_addentry(%O, %O, %O, %O, %O)\n", source, mc, data, vars, b)) + unless (canPost(SNICKER)) return 0; + unless (vars["_text"] && strlen(vars["_text"])) { + sendmsg(source, "_warning_usage_addentry", + "Usage: /addentry <text>", ([ ])); return 1; } - - int id = to_int(vars["_id"]); - unless (setSubject(id, vars["_title"])) - sendmsg(source, "_error_thread_invalid_entry", - "#[_id]: no such entry", (["_id": id])); - + addEntry(source, SNICKER, vars["_text"], vars["_title"]); return 1; } _request_comment(source, mc, data, vars, b) { + P3((">> _request_comment(%O, %O, %O, %O, %O)\n", source, mc, data, vars, b)) unless (vars["_id"] && strlen(vars["_id"]) && vars["_text"] && strlen(vars["_text"])) { sendmsg(source, "_warning_usage_reply", @@ -140,25 +210,17 @@ _request_comment(source, mc, data, vars, b) { } int id = to_int(vars["_id"]); - unless (addComment(vars["_text"], SNICKER, id)) + string snicker = SNICKER; + P3((">>> id: %O, vars: %O\n", id, vars)); + unless (addEntry(source, snicker, vars["_text"], vars["_title"], id)) sendmsg(source, "_error_thread_invalid_entry", "#[_id]: no such entry", (["_id": id])); return 1; } -_request_addentry(source, mc, data, vars, b) { - unless (canPost(SNICKER)) return 0; - unless (vars["_text"] && strlen(vars["_text"])) { - sendmsg(source, "_warning_usage_addentry", - "Usage: /addentry <text>", ([ ])); - return 1; - } - addEntry(vars["_text"], SNICKER); - return 1; -} - _request_delentry(source, mc, data, vars, b) { + P3((">> _request_delentry(%O, %O, %O, %O, %O)\n", source, mc, data, vars, b)) unless (canPost(SNICKER)) return 0; unless (vars["_id"] && strlen(vars["_id"])) { sendmsg(source, "_warning_usage_delentry", @@ -177,6 +239,24 @@ _request_delentry(source, mc, data, vars, b) { return 1; } +#if 0 +_request_title(source, mc, data, vars, b) { + P3((">> _request_title(%O, %O, %O, %O, %O)\n", source, mc, data, vars, b)) + unless (vars["_id"] && strlen(vars["_id"])) { + sendmsg(source, "_warning_usage_title", + "Usage: /title <id> <title>", ([ ])); + return 1; + } + + int id = to_int(vars["_id"]); + unless (setTitle(id, vars["_title"])) + sendmsg(source, "_error_thread_invalid_entry", + "#[_id]: no such entry", (["_id": id])); + + return 1; +} +#endif + msg(source, mc, data, vars){ P3(("thread:msg(%O, %O, %O, %O)", source, mc, data, vars)) // TODO: die source muss hierbei uebereinstimmen mit dem autor @@ -192,81 +272,174 @@ msg(source, mc, data, vars){ return ::msg(source, mc, data, vars); } -setSubject(id, thread) { - unless (_thread && id >= 0 && id <= sizeof(_thread) && _thread[id]) return 0; - _thread[id]["thread"] = thread; - save(); - return 1; -} - -// TODO: topic uebergeben -addEntry(text, unick, thread) { - int id = sizeof(_thread); - mapping newentry = ([ - "id": id, - "text": text, - "author": unick, - "date": time(), - "thread": thread || 
"", - ]); - _thread += ({ newentry }); - save(); - castmsg(ME, "_notice_thread_entry", - thread ? - "[_nick] adds an entry in [_nick_place] (#[_id]): \"[_thread]\":\n[_entry]" : - "[_nick] adds an entry in [_nick_place] (#[_id]):\n[_entry]", - ([ - "_entry": text, - "_id": id, - "_thread": thread, - "_nick": unick, - ])); - return 1; -} - -addComment(text, unick, id) { - mapping entry; - unless (_thread && id >= 0 && id <= sizeof(_thread) && _thread[id]) return 0; - - entry = _thread[id]; - unless (entry["comments"]) { - entry["comments"] = ({ }); +varargs string htmlComments(array(mixed) entries, int level) { + mapping entry, vars; + string ht = "", style; + foreach(entry : entries) { + vars = entry[LOG_VARS]; + style = level ? "style='padding-left: " + level + "em'" : ""; + ht += "<div class='comment' title='" + isotime(ctime(vars["_time_place"]), 1) + "' " + style + "><span class='comment-author'>" + vars["_nick"] + "</span>: <span class='comment-text'>" + htquote(vars["_text"], 1) + "</span></div>\n"; + if (sizeof(entry) >= LOG_CHILDREN + 1) ht += htmlComments(entry[LOG_CHILDREN], level + 1); } - int date = time(); - entry["comments"] += ({ (["text" : text, "nick" : unick, "date": date ]) }); - // vSet("entries", entries); - save(); - castmsg(ME, "_notice_thread_comment", - entry["thread"] && strlen(entry["thread"]) ? - "[_nick] adds a comment to \"[_thread]\" (entry #[_id]) of [_nick_place]:\n[_comment]" : - "[_nick] adds a comment to entry #[_id] of [_nick_place]:\n[_comment]", - ([ - "_entry" : entry["text"], - "_id" : id, - "_thread" : entry["thread"], - "_comment" : text, - "_nick" : unick, - "_date": date, - ])); - return 1; + return ht; } -delEntry(int id, source, vars) { - unless (_thread && id >= 0 && id <= sizeof(_thread) && _thread[id]) return 0; +varargs string htmlEntries(array(mixed) entries, int nojs, string chan, string submit, string url_prefix) { + P3((">> threads:htmlentries(%O, %O, %O, %O, %O)\n", entries, nojs, chan, submit, url_prefix)) + string text, ht = ""; + string id_prefix = chan ? chan + "-" : ""; + unless (url_prefix) url_prefix = ""; + unless (nojs) ht += + "<script type='text/javascript'>\n" + "function toggle(e) { if (typeof e == 'string') e = document.getElementById(e); e.className = e.className.match('hidden') ? e.className.replace(/ *hidden/, '') : e.className + ' hidden'; }\n" + "</script>\n"; - array(string) entries, authors, a; - string unick; + mapping entry, vars; + foreach (entry : entries) { + P3((">>> entry: %O\n", entry)) + vars = entry[LOG_VARS]; - if (canPost(unick = lower_case(SNICKER))) { - unless (lower_case(_thread[id]["author"]) == unick) return 0; + text = htquote(vars["_text"], 1); + + string comments = ""; + if (sizeof(entry) >= LOG_CHILDREN + 1) comments = htmlComments(entry[LOG_CHILDREN]); + + ht += + "<div class='entry'>\n" + "<div class='header'>\n" + "<a href=\"" + url_prefix + "?id=" + vars["_id"] + "\">" + "<span class='id'>#" + vars["_id"] + "</span> - \n" + "<span class='author'>" + vars["_nick"] + "</span>\n" + + (vars["_title"] && strlen(vars["_title"]) ? " - " : "") + + "<span class='title'>" + htquote(vars["_title"] || "") + "</span>\n" + "</a>" + "</div>\n" + "<div class='body'>\n" + "<div class='text'>" + text + "</div>\n" + "<div id='comments-" + id_prefix + vars["_id"] + "' class='comments'>" + comments + + (submit && strlen(submit) ? 
+ "<a onclick=\"toggle(this.nextSibling)\">» reply</a>" + "<div class='comment-submit hidden'>" + "<textarea autocomplete='off'></textarea>" + //FIXME: cmd is executed twice, because after a set-cookie it's parsed again + "<input type='button' value='Send' onclick=\"cmd('comment " + vars["_id"] + " '+ this.previousSibling.value, '" + submit + "')\">" + "</div>" : "") + + "</div>\n" + "</div>\n" + "<div class='footer'>\n" + "<span class='date'>" + isotime(ctime(vars["_time_place"]), 1) + "</span>\n" + "<span class='comments-link'>" + "<a onclick=\"toggle('comments-" + id_prefix + vars["_id"] + "')\">" + sizeof(vars["_children"]) + " comments</a>" + "</span>\n" + "</div>\n" + "</div>\n"; + } + P3((">>> ht: %O\n", ht)) + return "<div class='threads'>" + ht + "</div>"; +} + +// TODO: fix markup, not displayed correctly (in firefox at least) +string rssEntries(array(mixed) entries) { + string rss = + "<?xml version=\"1.0\" encoding=\"" SYSTEM_CHARSET "\" ?>\n" + "<rdf:RDF\n" + "xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\"\n" + "xmlns=\"http://purl.org/rss/1.0/\">\n\n" + "<channel>\n" + "\t<title>PSYC - Protocol for Synchronous Conferencing\n" + "\thttp://www.psyc.eu\n" + "\tNews about the PSYC project\n" + "\n"; + + mapping entry, vars; + foreach (entry : entries) { + vars = entry[LOG_VARS]; + rss += + "\n\n" + "\t"+ (vars["_title"] || "no title") +"\n" + "\thttp://" + HTTP_OR_HTTPS_URL + "/" + pathName() + "?id=" + vars["_id"] + "\n" + "\t" + vars["_text"] + "\n" + "\t" + isotime(ctime(vars["_time_place"]), 1) + "\n" + "\t" + vars["_nick"] + "\n" + "\n"; } - //_thread = _thread[0..id-1] + _thread[id+1..]; - // set to 0 instead so entry ids won't change - _thread[id] = 0; - save(); + rss += "\n"; + return rss; +} - return 1; +string jsEntries(array(mixed) entries) { + string js = + "function Entry(id, thread, author, date, text) {\n" + "\tthis.id = id;\n" + "\tthis.thread = thread;\n" + "\tthis.author = author;\n" + "\tthis.date = date;\n" + "\tthis.text = text;\n" + "}\n\n" + "document.blogentries = new Array(\n"; + + mapping entry, vars; + foreach (entry : entries) { + vars = entry[LOG_VARS]; + js += "new Entry(" + vars["_id"] + "," + "\"" + vars["_title"] + "\"," + "\"" + vars["_nick"] + "\"," + + isotime(ctime(vars["_time_place"]), 1) + "," + "\"" + vars["_text"] + "\"),\n"; + } + + return js[..<3] + ");"; +} + +varargs string jsonEntries(int limit, int offset) { + return make_json(entries(limit, offset)); +} + +varargs void jsonExport(int limit, int offset) { + write(jsonEntries(limit, offset)); +} + +varargs void jsExport(int limit, int offset) { + write(jsEntries(entries(limit, offset))); +} + +varargs void rssExport(int limit, int offset) { + write(rssEntries(entries(limit, offset, 1))); +} + +varargs string htMain(int limit, int offset, string chan) { + return htmlEntries(entries(limit, offset, 1), 0, chan); +} + +varargs void displayMain(int limit, int offset) { + write(htMain(limit, offset)); +} + +string htEntry(int id) { + return htmlEntries(entry(id)); +} + +void displayEntry(int id) { + write(htEntry(id) || "No such entry."); +} + +// wir können zwei strategien fahren.. die technisch einfachere ist es +// die reihenfolge der elemente festzulegen und für jedes ein w(_HTML_xy +// auszuspucken. flexibler wär's stattdessen wenn jede seite ein einziges +// w(_PAGES_xy ausgeben würde in dem es per [_HTML_list_threads] oder +// ähnlichem die blog-elemente per psyctext-vars übergibt ... 
dann kann +// es immernoch per {_HTML_head_threads} header und footer einheitlich +// halten. womöglich kann man auch nachträglich plan A in plan B +// umwandeln..... hmmm -lynX +// +void displayHeader() { + w("_HTML_head_threads", + "\n"+ + "\n\n"); +} +void displayFooter() { + w("_HTML_tail_threads", ""); } htget(prot, query, headers, qs, data) { @@ -276,8 +449,7 @@ htget(prot, query, headers, qs, data) { int a; int limit = to_int(query["limit"]) || DEFAULT_BACKLOG; int offset = to_int(query["offset"]); - - unless (webact) webact = PLACE_PATH + MYLOWERNICK; + string webact = PLACE_PATH + MYLOWERNICK; // shouldnt it be "html" here? sTextPath(query["layout"] || MYNICK, query["lang"], "ht"); @@ -382,7 +554,7 @@ htget(prot, query, headers, qs, data) { rssExport(limit, offset); } else { // normaler Export - P2(("all entries: %O\n", _thread)) + //P2(("all entries: %O\n", _thread)) htok3(prot, "text/html", "Cache-Control: no-cache\n"); displayHeader(); // display the blog @@ -395,243 +567,75 @@ htget(prot, query, headers, qs, data) { return 1; } -entries(int limit, int offset) { - array(mapping) entries = ({ }); - int i, n = 0, o = 0; - for (i = sizeof(_thread) - 1; i >= 0; i--) { - P3((">>> _thread[%O]: %O\n", i, _thread[i])) - unless (_thread[i]) continue; - if (o++ < offset) continue; - entries += ({ _thread[i] }); - if (++n >= limit) break; +void nntpget(string cmd, string args) { + array(mixed) entry, entries; + mapping vars; + int i; + P2(("calling nntpget %s with %O\n", cmd, args)) + switch(cmd) { + case "LIST": + write(MYNICK + " 0 1 n\n"); + break; + case "ARTICLE": + i = to_int(args) - 1; + //P2(("i is: %d\n", i)) + unless (entry = entry(i)) break; + vars = entry[LOG_VARS]; + write(S("220 %d <%s%d@%s> article\n", + i + 1, MYNICK, i + 1, SERVER_HOST)); + write(S("From: %s\n", vars["_nick"])); + write(S("Newsgroups: %s\n", MYNICK)); + write(S("Subject: %s\n", vars["_title"])); + write(S("Date: %s\n", isotime(ctime(vars["_time_place"]), 1))); + write(S("Xref: %s %s:%d\n", SERVER_HOST, MYNICK, i + 1)); + write(S("Message-ID: <%s$%d@%s>\n", MYNICK, i+1, SERVER_HOST)); + write("\n"); + write(vars["_text"]); + write("\n.\n"); + break; + case "GROUP": + write(S("211 %d 1 %d %s\n", numEntries(), numEntries(), MYNICK)); + break; + case "XOVER": + entries = entries(); + foreach (entry : entries) { + unless (entry = entry(i)) break; + vars = entry[LOG_VARS]; + write(S("%d\t%s\t%s\t%s <%s%d@%s>\t1609\t22\tXref: news.t-online.com\t%s:%d\n", + i+1, vars["_title"], + vars["_nick"], isotime(ctime(vars["_time_place"]), 1), + MYNICK, i+1, + SERVER_HOST, MYNICK, i+1)); + } + break; + default: + P2(("unimplemented nntp command: %s\n", cmd)) } - return entries; } -htmlEntries(array(mapping) entries, int nojs, string chan, string submit, string url_prefix) { - P3((">> threads:htmlentries(%O, %O, %O, %O)\n", entries, nojs, chan, submit)) - string t, ht = ""; - string id_prefix = chan ? chan + "-" : ""; - unless(url_prefix) url_prefix = ""; - unless (nojs) ht += - "\n"; - foreach (mapping entry : entries) { - P3((">>> entry: %O\n", entry)) - unless (entry) continue; - t = htquote(entry["text"]); - t = replace(t, "\n", "
\n"); - t = replace(t, "<", "<"); - t = replace(t, ">", ">"); +/**** old stuff ****/ - string c = ""; - if (entry["comments"]) - foreach(mapping comment : entry["comments"]) - c += "
" + comment["nick"] + ": " + comment["text"] + "
\n"; - - ht += - "
\n" - "\n" - "
\n" - "
" + t + "
\n" - "
" + c + - (submit && strlen(submit) ? - "» reply" - "" : "") + - "
\n" - "
\n" - "\n" - "
\n"; - } - P3((">>> ht: %O\n", ht)) - return "
" + ht + "
"; -} - -rssEntries(array(mapping) entries) { - string rss = - "\n" - "\n\n" - "\n" - "\tPSYC - Protocol for Synchronous Conferencing\n" - "\thttp://www.psyc.eu\n" - "\tNews about the PSYC project\n" - "\n"; - - foreach (mapping entry : entries) { - rss += - "\n\n" - "\t"+ entry["thread"] +"\n" - "\thttp://" + SERVER_HOST + ":33333" + webact + "?id=" + entry["id"] + "\n" - "\t" + entry["text"] + "\n" - "\t" + isotime(ctime(entry["date"]), 1) + "\n" - "\t" + entry["author"] + "\n" - "\n"; - } - - rss += "\n"; - return rss; -} - -jsEntries(array(mapping) entries) { - string js = - "function Entry(id, thread, author, date, text) {\n" - "\tthis.id = id;\n" - "\tthis.thread = thread;\n" - "\tthis.author = author;\n" - "\tthis.date = date;\n" - "\tthis.text = text;\n" - "}\n\n" - "document.blogentries = new Array(\n"; - - foreach (mapping entry : entries) { - js += "new Entry(" + entry["id"] + "," - "\"" + entry["thread"] + "\"," - "\"" + entry["author"] + "\"," - + isotime(ctime(entry["date"]), 1) + "," - "\"" + entry["text"] + "\"),\n"; - } - - return js[..<3] + ");"; -} - -jsonEntries(int limit, int offset) { - return make_json(entries(limit, offset)); -} - -jsonExport(int limit, int offset) { - write(jsonEntries(limit, offset)); -} - -jsExport(int limit, int offset) { - write(jsEntries(limit, offset)); -} - -rssExport(int limit, int offset) { - write(rssEntries(entries(limit, offset))); -} - -htMain(int limit, int offset, string chan) { - return htmlEntries(entries(limit, offset), 0, chan); -} - -displayMain(int limit, int offset) { - write(htMain(limit, offset)); -} - -htEntry(int id) { - unless (_thread && id >= 0 && id <= sizeof(_thread) && _thread[id]) return 0; - return htmlEntries(({ _thread[id] })); -} - -displayEntry(int id) { - write(htEntry(id) || "No such entry."); -} - -// wir können zwei strategien fahren.. die technisch einfachere ist es -// die reihenfolge der elemente festzulegen und für jedes ein w(_HTML_xy -// auszuspucken. flexibler wär's stattdessen wenn jede seite ein einziges -// w(_PAGES_xy ausgeben würde in dem es per [_HTML_list_threads] oder -// ähnlichem die blog-elemente per psyctext-vars übergibt ... dann kann -// es immernoch per {_HTML_head_threads} header und footer einheitlich -// halten. womöglich kann man auch nachträglich plan A in plan B -// umwandeln..... hmmm -lynX +// datenstruktur für threads? 
+//
+// the existing structure is: one big array of entries.
+//
+// how about a mapping with key=threadname and value=array-of-entries?
+// subjects go away: they simply become the name of the thread.
+// whoever renames a thread in a reply is in reality starting a new
+// thread, say as "was: old thread".
+//
+// the drawback of such a structure would be that new comments in old
+// threads are no longer found quickly - you have to rely on the
+// notification, which on the other hand is the strength of psycblogs.
+// the notifications could additionally be fed into the history..
+//
+// drawbacks of the existing structure are: 1. the thread name is kept in
+// every entry, 2. threads can only be rendered by walking the whole blog.
+//
+// wait a moment.. what you call "comments" already are the threads!
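To make the new data flow concrete, an illustrative sketch (not part of the patch; nicknames and texts are invented) of how a place inheriting place/threads builds and reads back a small thread. create() reserves slot 0, so entry ids start at 1; a reply stores its parent id in "_parent" while the parent collects child ids in "_children":

    void demo_thread(mixed source) {
        addEntry(source, "fippo", "first post", "hello world");   // stored as entry #1
        addEntry(source, "tg", "i disagree", 0, 1);                // entry #2, reply to #1

        array(mixed) parent = logPick(1);
        // parent[LOG_VARS]["_children"] is now ({ 2 }),
        // and logPick(2)[LOG_VARS]["_parent"] is 1.

        // newest top-level entries, each carrying its reply subtree
        // in the extra LOG_CHILDREN element appended by entries():
        sendEntries(source, entries(10));
    }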
#if 0 _request_iterator(source, mc, data, vars, b) { diff --git a/world/net/place/userthreads.c b/world/net/place/userthreads.c index 7439f36..7c4d3d3 100644 --- a/world/net/place/userthreads.c +++ b/world/net/place/userthreads.c @@ -4,8 +4,6 @@ #define BLAME "!configuration" #define DONT_REWRITE_NICKS -#define PLACE_HISTORY -#define PLACE_OWNED #define HISTORY_GLIMPSE 12 #include @@ -28,7 +26,7 @@ load(name, keep) { P3((">> userthreads:load(%O, %O)\n", name, keep)) sscanf(name, "~%s#%s", owner, channel); - vSet("owners", ([ owner: 0 ])); + vSet("owners", ([ lower_case(owner) ])); vSet("privacy", "private"); vSet("twitter", 0); vSet("identica", 0); @@ -176,8 +174,9 @@ _request_identica(source, mc, data, vars, b) { } #endif -addEntry(text, unick, thread) { - if (::addEntry(text, unick, thread)) { +varargs int addEntry(mixed source, string snicker, string text, string title, int parent_id) { + int ret; + if (ret = ::addEntry(source, snicker, text, title, parent_id)) { #ifdef TWITTER if (v("twitter") && twitter) twitter->status_update(text); #endif @@ -185,6 +184,7 @@ addEntry(text, unick, thread) { if (v("identica") && identica) identica->status_update(text); #endif } + return ret; } htMain(int limit, int offset) { @@ -218,3 +218,9 @@ psycName() { pathName() { return regreplace(MYNICK, "#", "/", 1); } + +#ifdef _flag_save_userthreads_immediately +qSaveImmediately() { + return 1; +} +#endif diff --git a/world/net/twitter/client.c b/world/net/twitter/client.c index 79da8b2..e3a46ee 100644 --- a/world/net/twitter/client.c +++ b/world/net/twitter/client.c @@ -46,7 +46,7 @@ void status_update(string text) { fetch(ua, api_base_url + "/statuses/update.json", "POST", (["status": text])); } -#if 1 //not used, just an example +#if 0 //not used, just an example void parse_home_timeline(string body, string headers, int http_status) { P3(("twitter/client:parse_home_timeline(%O, %O, %O)\n", body, headers, http_status)) } diff --git a/world/static/examine.css b/world/static/examine.css index 77bc6cc..40e919d 100644 --- a/world/static/examine.css +++ b/world/static/examine.css @@ -45,7 +45,7 @@ body.threads, margin: 44; width: 562; } -.entry .title, +.entry .header, .ldpc { background: #f33; color: black; @@ -110,11 +110,11 @@ body.threads, width: 100%; } -.entry .title a { +.entry .header a { color: black; } -.entry .title .author {} -.entry .title .subject {} +.entry .header .author {} +.entry .header .title {} .entry .footer a, .entry .footer a:visited {
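Finally, a sketch of the site-wide switches the commit message refers to (an assumed local.h fragment, not included in this diff). The first flag enables immediate saving for every place with history via archetype.gen's qSaveImmediately(); the second enables it only for the per-user thread channels through the qSaveImmediately() override at the end of place/userthreads.c:

    // local.h (site configuration)
    #define _flag_save_place_log_immediately      // every place: save() after each logAppend()
    #define _flag_save_userthreads_immediately    // or only ~user#channel thread places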