mirror of
git://git.psyced.org/git/psyced
synced 2024-08-15 03:25:10 +00:00
place/threads: use _log to store data, threaded comments

place/archetype.gen: new save-log-immediately flag to save() after logAppend():
 - in place/name.c: define SAVE_LOG_IMMEDIATELY
 - in local.h: define _flag_save_place_log_immediately
               define _flag_save_userthreads_immediately

place/text: added x()
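The flags named in the commit message are plain preprocessor switches. A minimal sketch of how they are meant to be set, assuming the usual psyced convention that local.h carries server-wide configuration (only the #define lines themselves come from this commit message; everything else in a real local.h is omitted):

    // local.h — hypothetical excerpt: save the lastlog of places and
    // userthreads to disk right after every logAppend()
    #define _flag_save_place_log_immediately
    #define _flag_save_userthreads_immediately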
This commit is contained in:
parent 3465bba9bb
commit 58bcc84430

12 changed files with 515 additions and 436 deletions
@@ -1,7 +1,7 @@
 /* identi.ca client, uses the twitter api
  * http://status.net/wiki/Twitter-compatible_API
  *
- * - register app @ http://identi.ca/settings/oauthapps
+ * - register app @ https://identi.ca/settings/oauthapps
  * - then in local.h #define IDENTICA_KEY & IDENTICA_SECRET
  */

@@ -14,11 +14,11 @@ object load(object usr, string key, string secret, string request, string access
     display_name = "identi.ca";
     api_base_url = "http://identi.ca/api";

-    unless (consumer_key) consumer_key = IDENTICA_KEY;
-    unless (consumer_secret) consumer_secret = IDENTICA_SECRET;
-    unless (request_token_url) request_token_url = api_base_url + "/oauth/request_token";
-    unless (access_token_url) access_token_url = api_base_url + "/oauth/access_token";
-    unless (authorize_url) authorize_url = api_base_url + "/oauth/authorize";
+    consumer_key = IDENTICA_KEY;
+    consumer_secret = IDENTICA_SECRET;
+    request_token_url = api_base_url + "/oauth/request_token";
+    access_token_url = api_base_url + "/oauth/access_token";
+    authorize_url = api_base_url + "/oauth/authorize";

     return ::load(usr, key, secret, request, access, authorize);
 }
world/net/include/lastlog.h (new file, +15)
@@ -0,0 +1,15 @@
+#ifndef LASTLOG_H
+#define LASTLOG_H
+
+// _log fields
+#define LOG_SOURCE 0
+#define LOG_SOURCE_OBJ 0
+#define LOG_SOURCE_UNI 1
+#define LOG_MC 1
+#define LOG_DATA 2
+#define LOG_VARS 3
+#define LOG_CHILDREN 4 //only added by place/threads:entries()
+
+#define LOG_WIDTH 4
+
+#endif
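For orientation, a sketch of how these offsets are used against the flat _log array elsewhere in this commit (illustrative reading of the diff, not code from the patch): each logged message occupies LOG_WIDTH consecutive slots, so record i starts at i * LOG_WIDTH.

    int base = i * LOG_WIDTH;
    mixed   source = _log[base + LOG_SOURCE]; // an object, or a ({ object, uniform }) pair
                                              // indexed with LOG_SOURCE_OBJ / LOG_SOURCE_UNI
    string  mc     = _log[base + LOG_MC];     // message class, e.g. "_notice_thread_entry"
    string  data   = _log[base + LOG_DATA];   // psyctext template
    mapping vars   = _log[base + LOG_VARS];   // message variables
    // LOG_CHILDREN only appears in the per-entry arrays returned by
    // place/threads:entries(), which append a fifth element with the
    // already-resolved child entries.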
@@ -13,6 +13,7 @@
 #endif

 #include <net.h>
+#include <lastlog.h>

 protected array(mixed) _log;

@@ -66,6 +67,7 @@ logInit(takeThis) {
 }

 logClip(maxlen, cutlen) {
+    P3(("logClip(%O, %O)\n", maxlen, cutlen))
     int howmany;

     howmany = sizeof(_log);
@@ -109,6 +111,7 @@ logView(a, showingLog, defAmount) {
     mapping m;

     ll = 0; for(i=0; i<sizeof(_log); i+=4) {
+        unless (_log[i]) continue;
         if (mappingp(m = _log[i+3])) if (
             ((text = _log[i+2]) && strstr(text, grep) >= 0)
          || ((t = m["_nick"]) && strstr(t, grep) >= 0)
@@ -145,6 +148,7 @@ logView(a, showingLog, defAmount) {
         i = sizeof(_log) - ll;
     }
     while (i < sizeof(_log)) {
+        unless (_log[i]) { i+= 4; continue; }
 #ifndef UNSAFE_LASTLOG
         msgView((pointerp(_log[i])
             ? _log[i++][0] || _log[i-1][1]
@@ -159,14 +163,16 @@ logView(a, showingLog, defAmount) {
     return ll / 4;
 }

-// pick a single message. used by POP3
-logPick(i) {
+int logExists(int i) {
+    i *= 4;
+    if (i < 0 || i >= sizeof(_log) || !_log[i]) return 0;
+    return 1;
+}
+
+// pick a single message. used by POP3 & place/threads
+array(mixed) logPick(int i) {
+    unless (logExists(i)) return 0;
     i *= 4;
-    if (i < 0) {
-        i = sizeof(_log) + i;
-        if (i < 0) return 0;
-    }
-    if (i > sizeof(_log)) return 0;
 #ifndef UNSAFE_LASTLOG
     return ({ (pointerp(_log[i])
         ? _log[i++][0] || _log[i-1][1]
@@ -177,8 +183,27 @@ logPick(i) {
 #endif /* UNSAFE_LASTLOG */
 }

+varargs public int logSize(string mc) {
+    unless (mc) return sizeof(_log) / 4;
+
+    int i, n = 0;
+    for (i = 0; i < sizeof(_log); i += 4)
+        if (_log[i] && abbrev(mc, _log[i])) n++;
+
+    return n;
+}
+
+int logSet(int i, array(mixed) item) {
+    if (i < 0 || i > logSize()) return 0;
+    if (i == logSize()) {
+        _log += item;
+    } else {
+        i *= 4;
+        _log[i..i+3] = item[0..3];
+    }
+    return 1;
+}
+
 // used to make a temporary copy of the log, in POP3
 public logQuery() { return _log; }

-public logSize() { return sizeof(_log) / 4; }
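A small usage sketch of the new helpers, mirroring how place/threads (further down in this commit) drives them — illustrative only; parent_id is a made-up variable, and the LOG_* constants come from the new lastlog.h:

    int id = logSize();                        // index the next appended message will get
    array(mixed) parent = logPick(parent_id);  // ({ source, mc, data, vars }) or 0
    if (parent) {
        unless (parent[LOG_VARS]["_children"]) parent[LOG_VARS]["_children"] = ({ });
        parent[LOG_VARS]["_children"] += ({ id }); // the vars mapping is shared, so this persists
        save();
    }
    logSet(some_id, ({ 0, 0, 0, 0 }));         // blank a slot: how delEntry "removes" a post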
@@ -6,12 +6,13 @@
 // to make sure they won't trigger
 // html commands
 //
-string htquote(string s) {
+varargs string htquote(string s, int newlines) {
     ASSERT("htquote", stringp(s), s)
     s = replace(s, "&", "&amp;");
//  s = replace(s, "\"", "&quot;"); //"
     s = replace(s, "<", "&lt;");
     s = replace(s, ">", "&gt;");
+    if (newlines) s = replace(s, "\n", "<br>\n");
     return s;
 }

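Assuming the &amp;/&lt;/&gt; entity replacements reconstructed above (the web view had decoded them), the new optional argument simply folds the newline conversion callers used to do by hand into htquote itself:

    htquote("a < b\nnext", 1)   // -> "a &lt; b<br>\nnext"
    htquote("a < b\nnext")      // -> "a &lt; b\nnext", unchanged behaviour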
@@ -80,7 +80,7 @@ private volatile mapping _sigs = ([
     "_request_ent": ({ "_request_entry", 0, "_id" }),
     "_request_comment": ({ "_request_comment", 0, "_id", "_text" }),
     "_request_com": ({ "_request_comment", 0, "_id", "_text" }),
-    "_request_thread": ({ "_request_thread", 0, "_id", "_title" }),
+    "_request_title": ({ "_request_title", 0, "_id", "_title" }),
     "_request_addentry": ({ "_request_addentry", 0, "_text" }),
     "_request_addent": ({ "_request_addentry", 0, "_text" }),
     "_request_submit": ({ "_request_addentry", 0, "_text" }),
@@ -172,3 +172,10 @@ varargs void w(string mc, string data, mixed vars) {
 }
 #endif

+// a simple implementation of perl's x operator
+string x(string str, int n) {
+    int i;
+    string res = "";
+    for (i = 0; i < n; i++) res += str;
+    return res;
+}
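A quick illustration of the helper added above — x(str, n) concatenates n copies of str; place/threads uses it further down as x(" ", level) to indent nested comments in text output:

    x("-", 5)    // -> "-----"
    x("ab", 2)   // -> "abab"
    x("ab", 0)   // -> ""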
@@ -473,7 +473,7 @@ qDescription(source, vars, profile, itsme) {
     foreach (string c : v("channels")) {
         object p = find_place(c);
         unless (objectp(p) && (p->isPublic() || (source && p->qMember(source))) /*&& p->numEntries() > 0*/) continue;
-        channels += ([ p->qChannel(): p->entries(10)]);
+        channels += ([ p->qChannel(): p->entries(10, 0, 1)]);
     }
     // don't make_json for anonymous queries which are handled locally
     dv["_channels"] = source ? make_json(channels) : channels;
@@ -208,6 +208,22 @@ private volatile string _logfile;
 qLogging() { return v("logging"); }
 #endif

+int qSaveImmediately() {
+#if defined(SAVE_LOG_IMMEDIATELY) || defined(_flag_save_place_log_immediately)
+    return 1;
+#else
+    return 0;
+#endif
+}
+
+int qHistoryPersistentLimit() {
+    return _limit_amount_history_persistent;
+}
+
+int qHistoryExportLimit() {
+    return _limit_amount_history_export;
+}
+
 // to be overloaded by place.gen
 qNewsfeed() { return 0; }
 // _request_list_feature uses this in *all* place objects, dont ifdef
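The new qSaveImmediately() above is driven purely by preprocessor flags. Per the commit message, a single place can opt in by defining SAVE_LOG_IMMEDIATELY in its own place/name.c before the place template gets included — a hypothetical sketch, with the rest of a real name.c omitted:

    // place/name.c — hypothetical excerpt
    #define SAVE_LOG_IMMEDIATELY   // makes qSaveImmediately() return 1 for this place only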
@@ -561,8 +577,7 @@ htget(prot, query, headers, qs, data, noprocess) {
     unless (noprocess) {
         if (query["amount"]) {
             sscanf(query["amount"], "%d", a);
-            a = a < _limit_amount_history_export ? a :
-                _limit_amount_history_export;
+            a = a < qHistoryExportLimit() ? a : qHistoryExportLimit();
             P4(("%O amount is %O\n", ME, a))
         }
         switch(query["format"]) {
@@ -741,7 +756,7 @@ insert_member(source, mc, data, vars, ni, neu, again) {
     // NEW: if OWNERS have not been provided by place.gen
     // we'll make the first guy who walks in our owner.
     unless (v("owners")) {
-        vSet("owners", ([ SNICKER: source ]));
+        vSet("owners", ([ lower_case(SNICKER): source ]));
         // don't send _warning_place_duty_owner
         // before acquitting enter operation..
         vars["_duty"] = "owner"; // _owner_new ?
@@ -1515,6 +1530,7 @@ castmsg(source, mc, data, vars) {
 # endif
     logAppend(source, mc, data, vars, 0, "_time_place");
     _histChange++;
+    if (qSaveImmediately()) save();
     // cannot just call ::castmsg after logAppend because
     // logAppend adds the _time_place var so i have to
     // patch around one way or the other
@@ -1624,13 +1640,13 @@ void create() {
 #ifdef PLACE_HISTORY
 void reset(int again) {
     // ::reset(again);
     if (_histChange) {
-        logClip(2 * _limit_amount_history_persistent,
-            _limit_amount_history_persistent);
+        if (qHistoryPersistentLimit())
+            logClip(2 * qHistoryPersistentLimit(), qHistoryPersistentLimit());
         save();
-        P2(("RESET: %O stores its history (+%O)\n",
-            ME, _histChange))
-    }
+        P2(("RESET: %O stores its history (+%O)\n", ME, _histChange))
+    }
     _histChange = 0;
 #if 0 //ndef NOCLEANUP
     // keep the server clean. unused places may exit.
@@ -2310,7 +2326,7 @@ _request_set_style(source, mc, data, vars, b) {
     string value = vars["_uniform_style"] || vars["_value"];
     if (value && (value = legal_url(value, "http")))
         vSet("_uniform_style", value);
-    else {
+    else if (value) {
         sendmsg(source,
             "_error_illegal_scheme",
             "That is not a valid [_scheme] URL for a file.",
@@ -2583,6 +2599,10 @@ sAide(whom) {
     int ret;
     mapping aides = v("aides") || ([]);

+    // change local uniform to nick
+    array(mixed) u = parse_uniform(whom);
+    if (u && is_localhost(lower_case(u[UHost]))) whom = u[UResource];
+
     t = lower_case(whom);
     if (aides[t]) {
         aides -= ([ t ]);
@@ -2612,7 +2632,8 @@ listAides(source) {
 qAide(snicker, aidesonly) {
     // never call with objectp.. use SNICKER
     // if (objectp(whom)) whom = whom->qName();
-    snicker = lower_case(snicker); // should we enforce SNICKER to be lc?
+    snicker = lower_case(snicker); // should we enforce SNICKER to be lc? yes!
     if (!aidesonly && sizeof(v("owners")) && member(v("owners"), snicker)) return 4;
     unless (mappingp(v("aides"))) return 0;
     return v("aides")[snicker];
@@ -3,8 +3,10 @@
 #include <net.h>
 #include <person.h>
 #include <status.h>
+#include <lastlog.h>

-inherit NET_PATH "place/owned";
+#define PLACE_HISTORY
+#define _limit_amount_history_persistent 0

 #ifndef DEFAULT_BACKLOG
 # define DEFAULT_BACKLOG 10
@@ -14,60 +16,159 @@ inherit NET_PATH "place/owned";
 # define STYLESHEET (v("_uniform_style") || "/static/examine.css")
 #endif

-// datenstruktur für threads?
-//
-// bestehende struktur ist: großes array von entries.
-//
-// wie wärs mit mapping mit key=threadname und value=array-of-entries
-// subjects werden abgeschafft: sie sind der name des threads
-// wer einen thread in seinem reply umnennen will legt in wirklichkeit
-// einen neuen thread an, meinetwegen mit "was: old thread"
-//
-// der nachteil an solch einer struktur wäre, dass man neue comments
-// in alten threads nicht so schnell findet - man ist auf die notification
-// angewiesen, was andererseits die stärke von psycblogs ist.
-// man könnte die notifications zudem noch in die history einspeisen..
-//
-// nachteile an der bestehenden struktur ist: 1. threadname in jeder
-// entry, 2. threads nur mittels durchlauf des ganzen blogs darstellbar
-//
-// momentmal.. das was du "comments" nennst sind doch schon die threads!
+inherit NET_PATH "place/owned";

-protected mapping* _thread;
+qHistoryPersistentLimit() {
+    return 0;
+}

-volatile int last_modified;
-volatile string webact;
+canPost(snicker) {
+    return qAide(snicker);
+}
+
+canDeleteOwn(snicker) {
+    return qAide(snicker);
+}
+
+canDeleteEverything(snicker) {
+    return qOwner(snicker);
+}
+
+int mayLog(string mc) {
+    return abbrev("_notice_thread", mc) || abbrev("_message", mc);
+}
+
+int showWebLog() {
+    return 1;
+}
+
+int numEntries() {
+    return logSize("_notice_thread");
+}

 create() {
     P3((">> threads:create()\n"))
     ::create();
-    unless (pointerp(_thread)) _thread = ({ });
+    //index entries from 1
+    logSet(0, ({0, 0, 0, 0}));
+}
+
+varargs array(mixed) entries(int limit, int offset, int reverse, int parent, int id) {
+    P3((">> entries(%O, %O, %O)\n", limit, offset, parent))
+    array(mixed) entries = ({}), entry, children, child;
+    mapping vars;
+    int i, n = 0, o = 0;
+    int from = id || logSize() - 1;
+    int to = id || parent || 0;
+    for (i = from; i >= to; i--) {
+        unless (logPick(i)) continue;
+        entry = logPick(i);
+        unless (abbrev("_notice_thread", entry[LOG_MC])) continue;
+        PT((">>> entry %O: %O\n", i, entry))
+        vars = entry[LOG_VARS];
+        if (vars["_parent"] != parent) continue;
+        if (o++ < offset) continue;
+        children = ({});
+        if (member(vars, "_children")) {
+            foreach (int c : vars["_children"]) {
+                if (child = logPick(c)) {
+                    children += ({ child + ({ entries(0, 0, reverse, c) }) });
+                }
+            }
+        }
+        PT((">>> adding %O: %O\n", i, entry))
+        if (reverse) {
+            entries += ({ entry + ({ children }) });
+        } else {
+            entries = ({ entry + ({ children }) }) + entries;
+        }
+        if (limit && ++n >= limit) break;
+    }
+    PT((">>> entries: %O\n", entries))
+    return entries;
+}
+
+varargs array(mixed) entry(int id) {
+    return entries(0, 0, 0, 0, id);
+}
+
+varargs int addEntry(mixed source, string snicker, string text, string title, int parent_id) {
+    P3((">> addEntry(%O, %O, %O, %O, %O)\n", source, snicker, text, title, parent_id))
+    int id = logSize();
+    string mc = "_notice_thread_entry";
+    string data = "[_nick] [_action]: ";
+
+    mapping vars = ([
+        "_id": id,
+        "_text": text,
+        "_nick": snicker,
+        "_action": "adds", //TODO: add a /set'ting for it, or find a better name
+    ]);
+
+    if (parent_id) {
+        P3((">>> parent_id: %O\n", parent_id))
+        array(mixed) parent;
+        unless (parent = logPick(parent_id)) return 0;
+        P3((">>> parent: %O\n", parent))
+        unless (parent[LOG_VARS]["_children"]) parent[LOG_VARS]["_children"] = ({ });
+        parent[LOG_VARS]["_children"] += ({ id });
+        save();
+
+        mc += "_reply";
+        data = member(parent[LOG_VARS], "_title") ?
+            "[_nick] [_action] in reply to #[_parent] ([_parent_title]): " :
+            "[_nick] [_action] in reply to #[_parent]: ",
+        vars += ([ "_parent": parent_id ]);
+    }
+
+    if (title && strlen(title)) {
+        vars += ([ "_title": title ]);
+        data += "[_title]\n[_text]";
+    } else {
+        data += "[_text]";
+    }
+
+    data += " (#[_id] in [_nick_place])";
+
+    castmsg(source, mc, data, vars);
+    return 1;
+}
+
+int delEntry(int id, mixed source, mapping vars) {
+    array(mixed) entry;
+    unless (entry = logPick(id)) return 0;
+
+    string unick;
+    unless (canDeleteEverything(SNICKER))
+        unless (canDeleteOwn(SNICKER) && lower_case(psyc_name(source)) == lower_case(entry[LOG_SOURCE][LOG_SOURCE_UNI]))
+            return 0;
+
+    logSet(id, ({0,0,0,0}));
+    save();
+    return 1;
+}
+
+sendEntries(mixed source, array(mixed) entries, int level) {
+    P3((">> sendEntries(%O, %O)\n", source, entries))
+    mapping vars;
+    int n = 0;
+    unless(source && entries) return n;
+    foreach(array(mixed) entry : entries) {
+        PT(("entry: %O\n", entry))
+        vars = entry[LOG_VARS];
+        sendmsg(source, regreplace(entry[LOG_MC], "^_notice", "_list", 1),
+            "[_indent][_nick]: " + (vars["_title"] ? "[_title]\n" : "") + "[_text] (#[_id])",
+            vars + ([ "_level": level, "_indent": x(" ", level) ]));
+        if (sizeof(entry) >= LOG_CHILDREN + 1) sendEntries(source, entry[LOG_CHILDREN], level + 1);
+        n++;
+    }
+    return n;
 }

 _request_entries(source, mc, data, vars, b) {
     int num = to_int(vars["_num"]) || DEFAULT_BACKLOG;
-    array(mapping) entries = ({ });
-    mapping entry;
-
-    for (int i = sizeof(_thread) - 1; i >= 0; i--) {
-        unless (entry = _thread[i]) continue;
-        entries =
-            ({ ([
-                "_sep" : strlen(entry["thread"]) ? " - " : "",
-                "_thread" : entry["thread"],
-                "_text" : entry["text"],
-                "_author" : entry["author"],
-                "_date" : entry["date"],
-                "_comments": sizeof(entry["comments"]),
-                "_id" : i,
-                "_nick_place" : MYNICK,
-            ]) }) + entries;
-        if (sizeof(entries) == num) break;
-    }
-    foreach(entry : entries)
-        sendmsg(source, "_list_thread_entry",
-            "#[_id] - [_author][_sep][_thread]: [_text] ([_comments])",
-            entry);
+    sendEntries(source, entries(num));
     return 1;
 }

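In short, the rewrite above drops the separate _thread array: a post or a comment is now just a "_notice_thread_*" message in the place's regular _log, a reply is linked by recording its id in the parent's vars["_children"], and entries() resolves those ids into a nested structure on demand. Roughly — ids, nick and texts below are made up for illustration:

    // one element of the array entries() returns: post #5 with one reply, #7
    ({ source, "_notice_thread_entry", data,
       ([ "_id": 5, "_nick": "someone", "_text": "first post", "_children": ({ 7 }) ]),
       ({  // element LOG_CHILDREN, appended by entries()
           ({ source, "_notice_thread_entry_reply", data,
              ([ "_id": 7, "_parent": 5, "_nick": "someone-else", "_text": "a reply" ]),
              ({ })  // the reply's own (empty) list of children
           })
       })
    })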
@@ -78,60 +179,29 @@ _request_entry(source, mc, data, vars, b) {
     return 1;
 }

-    mapping entry;
     int id = to_int(vars["_id"]);
-    if (id >= 0 && id < sizeof(_thread))
-        entry = _thread[id];
-
-    unless (entry) {
+    unless(sendEntries(source, entry(id))) {
         sendmsg(source, "_error_thread_invalid_entry",
             "#[_id]: no such entry", (["_id": id]));
-        return 1;
     }

-    sendmsg(source, "_list_thread_entry",
-        "#[_id] [_author][_sep][_thread]: [_text] ([_comments])",
-        ([
-        "_sep" : strlen(entry["thread"]) ? " - " : "",
-        "_thread" : entry["thread"],
-        "_text" : entry["text"],
-        "_author" : entry["author"],
-        "_date" : entry["date"],
-        "_comments": sizeof(entry["comments"]),
-        "_id" : id,
-        "_nick_place" : MYNICK ]) );
-
-    if (entry["comments"]) {
-        foreach(mapping item : entry["comments"]) {
-            sendmsg(source, "_list_thread_comment",
-                "> [_nick]: [_text]",
-                ([
-                "_nick" : item["nick"],
-                "_text" : item["text"],
-                "_date": item["date"],
-                "_nick_place" : MYNICK ]) );
-        }
-    }
     return 1;
 }

-_request_thread(source, mc, data, vars, b) {
-    unless (vars["_id"] && strlen(vars["_id"])) {
-        sendmsg(source, "_warning_usage_thread",
-            "Usage: /thread <id> <title>", ([ ]));
+_request_addentry(source, mc, data, vars, b) {
+    P3((">> _request_addentry(%O, %O, %O, %O, %O)\n", source, mc, data, vars, b))
+    unless (canPost(SNICKER)) return 0;
+    unless (vars["_text"] && strlen(vars["_text"])) {
+        sendmsg(source, "_warning_usage_addentry",
+            "Usage: /addentry <text>", ([ ]));
         return 1;
     }
-
-    int id = to_int(vars["_id"]);
-    unless (setSubject(id, vars["_title"]))
-        sendmsg(source, "_error_thread_invalid_entry",
-            "#[_id]: no such entry", (["_id": id]));
-
+    addEntry(source, SNICKER, vars["_text"], vars["_title"]);
     return 1;
 }

 _request_comment(source, mc, data, vars, b) {
+    P3((">> _request_comment(%O, %O, %O, %O, %O)\n", source, mc, data, vars, b))
     unless (vars["_id"] && strlen(vars["_id"]) &&
         vars["_text"] && strlen(vars["_text"])) {
         sendmsg(source, "_warning_usage_reply",
@@ -140,25 +210,17 @@ _request_comment(source, mc, data, vars, b) {
     }

     int id = to_int(vars["_id"]);
-    unless (addComment(vars["_text"], SNICKER, id))
+    string snicker = SNICKER;
+    P3((">>> id: %O, vars: %O\n", id, vars));
+    unless (addEntry(source, snicker, vars["_text"], vars["_title"], id))
         sendmsg(source, "_error_thread_invalid_entry",
             "#[_id]: no such entry", (["_id": id]));

     return 1;
 }

-_request_addentry(source, mc, data, vars, b) {
-    unless (canPost(SNICKER)) return 0;
-    unless (vars["_text"] && strlen(vars["_text"])) {
-        sendmsg(source, "_warning_usage_addentry",
-            "Usage: /addentry <text>", ([ ]));
-        return 1;
-    }
-    addEntry(vars["_text"], SNICKER);
-    return 1;
-}
-
 _request_delentry(source, mc, data, vars, b) {
+    P3((">> _request_delentry(%O, %O, %O, %O, %O)\n", source, mc, data, vars, b))
     unless (canPost(SNICKER)) return 0;
     unless (vars["_id"] && strlen(vars["_id"])) {
         sendmsg(source, "_warning_usage_delentry",
@@ -177,6 +239,24 @@ _request_delentry(source, mc, data, vars, b) {
     return 1;
 }

+#if 0
+_request_title(source, mc, data, vars, b) {
+    P3((">> _request_title(%O, %O, %O, %O, %O)\n", source, mc, data, vars, b))
+    unless (vars["_id"] && strlen(vars["_id"])) {
+        sendmsg(source, "_warning_usage_title",
+            "Usage: /title <id> <title>", ([ ]));
+        return 1;
+    }
+
+    int id = to_int(vars["_id"]);
+    unless (setTitle(id, vars["_title"]))
+        sendmsg(source, "_error_thread_invalid_entry",
+            "#[_id]: no such entry", (["_id": id]));
+
+    return 1;
+}
+#endif
+
 msg(source, mc, data, vars){
     P3(("thread:msg(%O, %O, %O, %O)", source, mc, data, vars))
     // TODO: die source muss hierbei uebereinstimmen mit dem autor
@@ -192,81 +272,174 @@ msg(source, mc, data, vars){
     return ::msg(source, mc, data, vars);
 }

-setSubject(id, thread) {
-    unless (_thread && id >= 0 && id <= sizeof(_thread) && _thread[id]) return 0;
-    _thread[id]["thread"] = thread;
-    save();
-    return 1;
-}
-
-// TODO: topic uebergeben
-addEntry(text, unick, thread) {
-    int id = sizeof(_thread);
-    mapping newentry = ([
-        "id": id,
-        "text": text,
-        "author": unick,
-        "date": time(),
-        "thread": thread || "",
-    ]);
-    _thread += ({ newentry });
-    save();
-    castmsg(ME, "_notice_thread_entry",
-        thread ?
-        "[_nick] adds an entry in [_nick_place] (#[_id]): \"[_thread]\":\n[_entry]" :
-        "[_nick] adds an entry in [_nick_place] (#[_id]):\n[_entry]",
-        ([
-        "_entry": text,
-        "_id": id,
-        "_thread": thread,
-        "_nick": unick,
-        ]));
-    return 1;
-}
-
-addComment(text, unick, id) {
-    mapping entry;
-    unless (_thread && id >= 0 && id <= sizeof(_thread) && _thread[id]) return 0;
-
-    entry = _thread[id];
-    unless (entry["comments"]) {
-        entry["comments"] = ({ });
+varargs string htmlComments(array(mixed) entries, int level) {
+    mapping entry, vars;
+    string ht = "", style;
+    foreach(entry : entries) {
+        vars = entry[LOG_VARS];
+        style = level ? "style='padding-left: " + level + "em'" : "";
+        ht += "<div class='comment' title='" + isotime(ctime(vars["_time_place"]), 1) + "' " + style + "><span class='comment-author'>" + vars["_nick"] + "</span>: <span class='comment-text'>" + htquote(vars["_text"], 1) + "</span></div>\n";
+        if (sizeof(entry) >= LOG_CHILDREN + 1) ht += htmlComments(entry[LOG_CHILDREN], level + 1);
     }
-    int date = time();
-    entry["comments"] += ({ (["text" : text, "nick" : unick, "date": date ]) });
-    // vSet("entries", entries);
-    save();
-    castmsg(ME, "_notice_thread_comment",
-        entry["thread"] && strlen(entry["thread"]) ?
-        "[_nick] adds a comment to \"[_thread]\" (entry #[_id]) of [_nick_place]:\n[_comment]" :
-        "[_nick] adds a comment to entry #[_id] of [_nick_place]:\n[_comment]",
-        ([
-        "_entry" : entry["text"],
-        "_id" : id,
-        "_thread" : entry["thread"],
-        "_comment" : text,
-        "_nick" : unick,
-        "_date": date,
-        ]));
-    return 1;
+    return ht;
 }

-delEntry(int id, source, vars) {
-    unless (_thread && id >= 0 && id <= sizeof(_thread) && _thread[id]) return 0;
+varargs string htmlEntries(array(mixed) entries, int nojs, string chan, string submit, string url_prefix) {
+    P3((">> threads:htmlentries(%O, %O, %O, %O, %O)\n", entries, nojs, chan, submit, url_prefix))
+    string text, ht = "";
+    string id_prefix = chan ? chan + "-" : "";
+    unless (url_prefix) url_prefix = "";
+    unless (nojs) ht +=
+        "<script type='text/javascript'>\n"
+        "function toggle(e) { if (typeof e == 'string') e = document.getElementById(e); e.className = e.className.match('hidden') ? e.className.replace(/ *hidden/, '') : e.className + ' hidden'; }\n"
+        "</script>\n";

-    array(string) entries, authors, a;
-    string unick;
+    mapping entry, vars;
+    foreach (entry : entries) {
+        P3((">>> entry: %O\n", entry))
+        vars = entry[LOG_VARS];

-    if (canPost(unick = lower_case(SNICKER))) {
-        unless (lower_case(_thread[id]["author"]) == unick) return 0;
+        text = htquote(vars["_text"], 1);
+
+        string comments = "";
+        if (sizeof(entry) >= LOG_CHILDREN + 1) comments = htmlComments(entry[LOG_CHILDREN]);
+
+        ht +=
+            "<div class='entry'>\n"
+            "<div class='header'>\n"
+            "<a href=\"" + url_prefix + "?id=" + vars["_id"] + "\">"
+            "<span class='id'>#" + vars["_id"] + "</span> - \n"
+            "<span class='author'>" + vars["_nick"] + "</span>\n"
+            + (vars["_title"] && strlen(vars["_title"]) ? " - " : "") +
+            "<span class='title'>" + htquote(vars["_title"] || "") + "</span>\n"
+            "</a>"
+            "</div>\n"
+            "<div class='body'>\n"
+            "<div class='text'>" + text + "</div>\n"
+            "<div id='comments-" + id_prefix + vars["_id"] + "' class='comments'>" + comments +
+            (submit && strlen(submit) ?
+            "<a onclick=\"toggle(this.nextSibling)\">» reply</a>"
+            "<div class='comment-submit hidden'>"
+            "<textarea autocomplete='off'></textarea>"
+            //FIXME: cmd is executed twice, because after a set-cookie it's parsed again
+            "<input type='button' value='Send' onclick=\"cmd('comment " + vars["_id"] + " '+ this.previousSibling.value, '" + submit + "')\">"
+            "</div>" : "") +
+            "</div>\n"
+            "</div>\n"
+            "<div class='footer'>\n"
+            "<span class='date'>" + isotime(ctime(vars["_time_place"]), 1) + "</span>\n"
+            "<span class='comments-link'>"
+            "<a onclick=\"toggle('comments-" + id_prefix + vars["_id"] + "')\">" + sizeof(vars["_children"]) + " comments</a>"
+            "</span>\n"
+            "</div>\n"
+            "</div>\n";
+    }
+    P3((">>> ht: %O\n", ht))
+    return "<div class='threads'>" + ht + "</div>";
+}
+
+// TODO: fix markup, not displayed correctly (in firefox at least)
+string rssEntries(array(mixed) entries) {
+    string rss =
+        "<?xml version=\"1.0\" encoding=\"" SYSTEM_CHARSET "\" ?>\n"
+        "<rdf:RDF\n"
+        "xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\"\n"
+        "xmlns=\"http://purl.org/rss/1.0/\">\n\n"
+        "<channel>\n"
+        "\t<title>PSYC - Protocol for Synchronous Conferencing</title>\n"
+        "\t<link>http://www.psyc.eu</link>\n"
+        "\t<description>News about the PSYC project</description>\n"
+        "</channel>\n";
+
+    mapping entry, vars;
+    foreach (entry : entries) {
+        vars = entry[LOG_VARS];
+        rss +=
+            "\n<item>\n"
+            "\t<title>"+ (vars["_title"] || "no title") +"</title>\n"
+            "\t<link>http://" + HTTP_OR_HTTPS_URL + "/" + pathName() + "?id=" + vars["_id"] + "</link>\n"
+            "\t<description>" + vars["_text"] + "</description>\n"
+            "\t<dc:date>" + isotime(ctime(vars["_time_place"]), 1) + "</dc:date>\n"
+            "\t<dc:creator>" + vars["_nick"] + "</dc:creator>\n"
+            "</item>\n";
     }

-//_thread = _thread[0..id-1] + _thread[id+1..];
-// set to 0 instead so entry ids won't change
-    _thread[id] = 0;
-    save();
+    rss += "</rdf:RDF>\n";
+    return rss;
+}

-    return 1;
+string jsEntries(array(mixed) entries) {
+    string js =
+        "function Entry(id, thread, author, date, text) {\n"
+        "\tthis.id = id;\n"
+        "\tthis.thread = thread;\n"
+        "\tthis.author = author;\n"
+        "\tthis.date = date;\n"
+        "\tthis.text = text;\n"
+        "}\n\n"
+        "document.blogentries = new Array(\n";
+
+    mapping entry, vars;
+    foreach (entry : entries) {
+        vars = entry[LOG_VARS];
+        js += "new Entry(" + vars["_id"] + ","
+            "\"" + vars["_title"] + "\","
+            "\"" + vars["_nick"] + "\","
+            + isotime(ctime(vars["_time_place"]), 1) + ","
+            "\"" + vars["_text"] + "\"),\n";
+    }
+
+    return js[..<3] + ");";
+}
+
+varargs string jsonEntries(int limit, int offset) {
+    return make_json(entries(limit, offset));
+}
+
+varargs void jsonExport(int limit, int offset) {
+    write(jsonEntries(limit, offset));
+}
+
+varargs void jsExport(int limit, int offset) {
+    write(jsEntries(entries(limit, offset)));
+}
+
+varargs void rssExport(int limit, int offset) {
+    write(rssEntries(entries(limit, offset, 1)));
+}
+
+varargs string htMain(int limit, int offset, string chan) {
+    return htmlEntries(entries(limit, offset, 1), 0, chan);
+}
+
+varargs void displayMain(int limit, int offset) {
+    write(htMain(limit, offset));
+}
+
+string htEntry(int id) {
+    return htmlEntries(entry(id));
+}
+
+void displayEntry(int id) {
+    write(htEntry(id) || "No such entry.");
+}
+
+// wir können zwei strategien fahren.. die technisch einfachere ist es
+// die reihenfolge der elemente festzulegen und für jedes ein w(_HTML_xy
+// auszuspucken. flexibler wär's stattdessen wenn jede seite ein einziges
+// w(_PAGES_xy ausgeben würde in dem es per [_HTML_list_threads] oder
+// ähnlichem die blog-elemente per psyctext-vars übergibt ... dann kann
+// es immernoch per {_HTML_head_threads} header und footer einheitlich
+// halten. womöglich kann man auch nachträglich plan A in plan B
+// umwandeln..... hmmm -lynX
+//
+void displayHeader() {
+    w("_HTML_head_threads",
+        "<html><head><link rel='stylesheet' type='text/css' href='"+ STYLESHEET +"'></head>\n"+
+        "<body class='threads'>\n\n");
+}
+void displayFooter() {
+    w("_HTML_tail_threads", "</body></html>");
 }

 htget(prot, query, headers, qs, data) {
@@ -276,8 +449,7 @@ htget(prot, query, headers, qs, data) {
     int a;
     int limit = to_int(query["limit"]) || DEFAULT_BACKLOG;
     int offset = to_int(query["offset"]);
-    unless (webact) webact = PLACE_PATH + MYLOWERNICK;
-
+    string webact = PLACE_PATH + MYLOWERNICK;
     // shouldnt it be "html" here?
     sTextPath(query["layout"] || MYNICK, query["lang"], "ht");
@@ -382,7 +554,7 @@ htget(prot, query, headers, qs, data) {
     rssExport(limit, offset);
 } else {
     // normaler Export
-    P2(("all entries: %O\n", _thread))
+    //P2(("all entries: %O\n", _thread))
     htok3(prot, "text/html", "Cache-Control: no-cache\n");
     displayHeader();
     // display the blog
|
||||||
return 1;
|
return 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
entries(int limit, int offset) {
|
void nntpget(string cmd, string args) {
|
||||||
array(mapping) entries = ({ });
|
array(mixed) entry, entries;
|
||||||
int i, n = 0, o = 0;
|
mapping vars;
|
||||||
for (i = sizeof(_thread) - 1; i >= 0; i--) {
|
int i;
|
||||||
P3((">>> _thread[%O]: %O\n", i, _thread[i]))
|
P2(("calling nntpget %s with %O\n", cmd, args))
|
||||||
unless (_thread[i]) continue;
|
switch(cmd) {
|
||||||
if (o++ < offset) continue;
|
case "LIST":
|
||||||
entries += ({ _thread[i] });
|
write(MYNICK + " 0 1 n\n");
|
||||||
if (++n >= limit) break;
|
break;
|
||||||
|
case "ARTICLE":
|
||||||
|
i = to_int(args) - 1;
|
||||||
|
//P2(("i is: %d\n", i))
|
||||||
|
unless (entry = entry(i)) break;
|
||||||
|
vars = entry[LOG_VARS];
|
||||||
|
write(S("220 %d <%s%d@%s> article\n",
|
||||||
|
i + 1, MYNICK, i + 1, SERVER_HOST));
|
||||||
|
write(S("From: %s\n", vars["_nick"]));
|
||||||
|
write(S("Newsgroups: %s\n", MYNICK));
|
||||||
|
write(S("Subject: %s\n", vars["_title"]));
|
||||||
|
write(S("Date: %s\n", isotime(ctime(vars["_time_place"]), 1)));
|
||||||
|
write(S("Xref: %s %s:%d\n", SERVER_HOST, MYNICK, i + 1));
|
||||||
|
write(S("Message-ID: <%s$%d@%s>\n", MYNICK, i+1, SERVER_HOST));
|
||||||
|
write("\n");
|
||||||
|
write(vars["_text"]);
|
||||||
|
write("\n.\n");
|
||||||
|
break;
|
||||||
|
case "GROUP":
|
||||||
|
write(S("211 %d 1 %d %s\n", numEntries(), numEntries(), MYNICK));
|
||||||
|
break;
|
||||||
|
case "XOVER":
|
||||||
|
entries = entries();
|
||||||
|
foreach (entry : entries) {
|
||||||
|
unless (entry = entry(i)) break;
|
||||||
|
vars = entry[LOG_VARS];
|
||||||
|
write(S("%d\t%s\t%s\t%s <%s%d@%s>\t1609\t22\tXref: news.t-online.com\t%s:%d\n",
|
||||||
|
i+1, vars["_title"],
|
||||||
|
vars["_nick"], isotime(ctime(vars["_time_place"]), 1),
|
||||||
|
MYNICK, i+1,
|
||||||
|
SERVER_HOST, MYNICK, i+1));
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
P2(("unimplemented nntp command: %s\n", cmd))
|
||||||
}
|
}
|
||||||
return entries;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
htmlEntries(array(mapping) entries, int nojs, string chan, string submit, string url_prefix) {
|
|
||||||
P3((">> threads:htmlentries(%O, %O, %O, %O)\n", entries, nojs, chan, submit))
|
|
||||||
string t, ht = "";
|
|
||||||
string id_prefix = chan ? chan + "-" : "";
|
|
||||||
unless(url_prefix) url_prefix = "";
|
|
||||||
unless (nojs) ht +=
|
|
||||||
"<script type='text/javascript'>\n"
|
|
||||||
"function toggle(e) { if (typeof e == 'string') e = document.getElementById(e); e.className = e.className.match('hidden') ? e.className.replace(/ *hidden/, '') : e.className + ' hidden'; }\n"
|
|
||||||
"</script>\n";
|
|
||||||
|
|
||||||
|
|
||||||
foreach (mapping entry : entries) {
|
|
||||||
P3((">>> entry: %O\n", entry))
|
|
||||||
unless (entry) continue;
|
|
||||||
|
|
||||||
t = htquote(entry["text"]);
|
/**** old stuff ****/
|
||||||
t = replace(t, "\n", "<br>\n");
|
|
||||||
t = replace(t, "<", "<");
|
|
||||||
t = replace(t, ">", ">");
|
|
||||||
|
|
||||||
string c = "";
|
// datenstruktur für threads?
|
||||||
if (entry["comments"])
|
|
||||||
foreach(mapping comment : entry["comments"])
|
|
||||||
c += "<div class='comment' title='" + isotime(ctime(comment["date"]), 1) + "'><span class='comment-author'>" + comment["nick"] + "</span>: <span class='comment-text'>" + comment["text"] + "</span></div>\n";
|
|
||||||
|
|
||||||
ht +=
|
|
||||||
"<div class='entry'>\n"
|
|
||||||
"<div class='title'>\n"
|
|
||||||
"<a href=\"" + url_prefix + "?id=" + entry["id"] + "\">"
|
|
||||||
"<span class='id'>#" + entry["id"] + "</span> - \n"
|
|
||||||
"<span class='author'>" + entry["author"] + "</span>\n"
|
|
||||||
+ (entry["thread"] && strlen(entry["thread"]) ? " - " : "") +
|
|
||||||
"<span class='subject'>" + htquote(entry["thread"]) + "</span>\n"
|
|
||||||
"</a>"
|
|
||||||
"</div>\n"
|
|
||||||
"<div class='body'>\n"
|
|
||||||
"<div class='text'>" + t + "</div>\n"
|
|
||||||
"<div id='comments-" + id_prefix + entry["id"] + "' class='comments'>" + c +
|
|
||||||
(submit && strlen(submit) ?
|
|
||||||
"<a onclick=\"toggle(this.nextSibling)\">» reply</a>"
|
|
||||||
"<div class='comment-submit hidden'>"
|
|
||||||
"<textarea autocomplete='off'></textarea>"
|
|
||||||
//FIXME: cmd is executed twice, because after a set-cookie it's parsed again
|
|
||||||
"<input type='button' value='Send' onclick=\"cmd('comment " + entry["id"] + " '+ this.previousSibling.value, '" + submit + "')\">"
|
|
||||||
"</div>" : "") +
|
|
||||||
"</div>\n"
|
|
||||||
"</div>\n"
|
|
||||||
"<div class='footer'>\n"
|
|
||||||
"<span class='date'>" + isotime(ctime(entry["date"]), 1) + "</span>\n"
|
|
||||||
"<span class='comments-link'>"
|
|
||||||
"<a onclick=\"toggle('comments-" + id_prefix + entry["id"] + "')\">" + sizeof(entry["comments"]) + " comments</a>"
|
|
||||||
"</span>\n"
|
|
||||||
"</div>\n"
|
|
||||||
"</div>\n";
|
|
||||||
}
|
|
||||||
P3((">>> ht: %O\n", ht))
|
|
||||||
return "<div class='threads'>" + ht + "</div>";
|
|
||||||
}
|
|
||||||
|
|
||||||
rssEntries(array(mapping) entries) {
|
|
||||||
string rss =
|
|
||||||
"<?xml version=\"1.0\" encoding=\"" SYSTEM_CHARSET "\" ?>\n"
|
|
||||||
"<rdf:RDF\n"
|
|
||||||
"xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\"\n"
|
|
||||||
"xmlns=\"http://purl.org/rss/1.0/\">\n\n"
|
|
||||||
"<channel>\n"
|
|
||||||
"\t<title>PSYC - Protocol for Synchronous Conferencing</title>\n"
|
|
||||||
"\t<link>http://www.psyc.eu</link>\n"
|
|
||||||
"\t<description>News about the PSYC project</description>\n"
|
|
||||||
"</channel>\n";
|
|
||||||
|
|
||||||
foreach (mapping entry : entries) {
|
|
||||||
rss +=
|
|
||||||
"\n<item>\n"
|
|
||||||
"\t<title>"+ entry["thread"] +"</title>\n"
|
|
||||||
"\t<link>http://" + SERVER_HOST + ":33333" + webact + "?id=" + entry["id"] + "</link>\n"
|
|
||||||
"\t<description>" + entry["text"] + "</description>\n"
|
|
||||||
"\t<dc:date>" + isotime(ctime(entry["date"]), 1) + "</dc:date>\n"
|
|
||||||
"\t<dc:creator>" + entry["author"] + "</dc:creator>\n"
|
|
||||||
"</item>\n";
|
|
||||||
}
|
|
||||||
|
|
||||||
rss += "</rdf:RDF>\n";
|
|
||||||
return rss;
|
|
||||||
}
|
|
||||||
|
|
||||||
jsEntries(array(mapping) entries) {
|
|
||||||
string js =
|
|
||||||
"function Entry(id, thread, author, date, text) {\n"
|
|
||||||
"\tthis.id = id;\n"
|
|
||||||
"\tthis.thread = thread;\n"
|
|
||||||
"\tthis.author = author;\n"
|
|
||||||
"\tthis.date = date;\n"
|
|
||||||
"\tthis.text = text;\n"
|
|
||||||
"}\n\n"
|
|
||||||
"document.blogentries = new Array(\n";
|
|
||||||
|
|
||||||
foreach (mapping entry : entries) {
|
|
||||||
js += "new Entry(" + entry["id"] + ","
|
|
||||||
"\"" + entry["thread"] + "\","
|
|
||||||
"\"" + entry["author"] + "\","
|
|
||||||
+ isotime(ctime(entry["date"]), 1) + ","
|
|
||||||
"\"" + entry["text"] + "\"),\n";
|
|
||||||
}
|
|
||||||
|
|
||||||
return js[..<3] + ");";
|
|
||||||
}
|
|
||||||
|
|
||||||
jsonEntries(int limit, int offset) {
|
|
||||||
return make_json(entries(limit, offset));
|
|
||||||
}
|
|
||||||
|
|
||||||
jsonExport(int limit, int offset) {
|
|
||||||
write(jsonEntries(limit, offset));
|
|
||||||
}
|
|
||||||
|
|
||||||
jsExport(int limit, int offset) {
|
|
||||||
write(jsEntries(limit, offset));
|
|
||||||
}
|
|
||||||
|
|
||||||
rssExport(int limit, int offset) {
|
|
||||||
write(rssEntries(entries(limit, offset)));
|
|
||||||
}
|
|
||||||
|
|
||||||
htMain(int limit, int offset, string chan) {
|
|
||||||
return htmlEntries(entries(limit, offset), 0, chan);
|
|
||||||
}
|
|
||||||
|
|
||||||
displayMain(int limit, int offset) {
|
|
||||||
write(htMain(limit, offset));
|
|
||||||
}
|
|
||||||
|
|
||||||
htEntry(int id) {
|
|
||||||
unless (_thread && id >= 0 && id <= sizeof(_thread) && _thread[id]) return 0;
|
|
||||||
return htmlEntries(({ _thread[id] }));
|
|
||||||
}
|
|
||||||
|
|
||||||
displayEntry(int id) {
|
|
||||||
write(htEntry(id) || "No such entry.");
|
|
||||||
}
|
|
||||||
|
|
||||||
// wir können zwei strategien fahren.. die technisch einfachere ist es
|
|
||||||
// die reihenfolge der elemente festzulegen und für jedes ein w(_HTML_xy
|
|
||||||
// auszuspucken. flexibler wär's stattdessen wenn jede seite ein einziges
|
|
||||||
// w(_PAGES_xy ausgeben würde in dem es per [_HTML_list_threads] oder
|
|
||||||
// ähnlichem die blog-elemente per psyctext-vars übergibt ... dann kann
|
|
||||||
// es immernoch per {_HTML_head_threads} header und footer einheitlich
|
|
||||||
// halten. womöglich kann man auch nachträglich plan A in plan B
|
|
||||||
// umwandeln..... hmmm -lynX
|
|
||||||
//
|
//
|
||||||
displayHeader() {
|
// bestehende struktur ist: großes array von entries.
|
||||||
w("_HTML_head_threads",
|
//
|
||||||
"<html><head><link rel='stylesheet' type='text/css' href='"+ STYLESHEET +"'></head>\n"+
|
// wie wärs mit mapping mit key=threadname und value=array-of-entries
|
||||||
"<body class='threads'>\n\n");
|
// subjects werden abgeschafft: sie sind der name des threads
|
||||||
}
|
// wer einen thread in seinem reply umnennen will legt in wirklichkeit
|
||||||
displayFooter() {
|
// einen neuen thread an, meinetwegen mit "was: old thread"
|
||||||
w("_HTML_tail_threads", "</body></html>");
|
//
|
||||||
}
|
// der nachteil an solch einer struktur wäre, dass man neue comments
|
||||||
|
// in alten threads nicht so schnell findet - man ist auf die notification
|
||||||
|
// angewiesen, was andererseits die stärke von psycblogs ist.
|
||||||
nntpget(cmd, args) {
|
// man könnte die notifications zudem noch in die history einspeisen..
|
||||||
mapping item;
|
//
|
||||||
int i;
|
// nachteile an der bestehenden struktur ist: 1. threadname in jeder
|
||||||
P2(("calling nntpget %s with %O\n", cmd, args))
|
// entry, 2. threads nur mittels durchlauf des ganzen blogs darstellbar
|
||||||
switch(cmd) {
|
//
|
||||||
case "LIST":
|
// momentmal.. das was du "comments" nennst sind doch schon die threads!
|
||||||
write(MYNICK + " 0 1 n\n");
|
|
||||||
break;
|
|
||||||
case "ARTICLE":
|
|
||||||
i = to_int(args) - 1;
|
|
||||||
P2(("i is: %d\n", i))
|
|
||||||
P2(("entries: %O\n", _thread))
|
|
||||||
unless (_thread && i >= 0 && i <= sizeof(_thread) && _thread[i]) break;
|
|
||||||
item = _thread[i];
|
|
||||||
write(S("220 %d <%s%d@%s> article\n",
|
|
||||||
i + 1, MYNICK, i + 1, SERVER_HOST));
|
|
||||||
write(S("From: %s\n", item["author"]));
|
|
||||||
write(S("Newsgroups: %s\n", MYNICK));
|
|
||||||
write(S("Subject: %s\n", item["thread"]));
|
|
||||||
write(S("Date: %s\n", isotime(ctime(item["date"]), 1)));
|
|
||||||
write(S("Xref: %s %s:%d\n", SERVER_HOST, MYNICK, i + 1));
|
|
||||||
write(S("Message-ID: <%s$%d@%s>\n", MYNICK, i+1, SERVER_HOST));
|
|
||||||
write("\n");
|
|
||||||
write(item["text"]);
|
|
||||||
write("\n.\n");
|
|
||||||
break;
|
|
||||||
case "GROUP":
|
|
||||||
write(S("211 %d 1 %d %s\n", sizeof(_thread),
|
|
||||||
sizeof(_thread), MYNICK));
|
|
||||||
break;
|
|
||||||
case "XOVER":
|
|
||||||
for (i = 0; i < sizeof(_thread); i++) {
|
|
||||||
unless(item = _thread[i]) continue;
|
|
||||||
P2(("item: %O\n", item))
|
|
||||||
write(S("%d\t%s\t%s\t%s <%s%d@%s>\t1609\t22\tXref: news.t-online.com\t%s:%d\n",
|
|
||||||
i+1, item["thread"],
|
|
||||||
item["author"], isotime(ctime(item["date"]), 1),
|
|
||||||
MYNICK, i+1,
|
|
||||||
SERVER_HOST, MYNICK, i+1));
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
default:
|
|
||||||
P2(("unimplemented nntp command: %s\n", cmd))
|
|
||||||
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
canPost(snicker) {
|
|
||||||
return qAide(snicker);
|
|
||||||
}
|
|
||||||
|
|
||||||
mayLog(mc) {
|
|
||||||
return abbrev("_notice_thread", mc) || abbrev("_message", mc);
|
|
||||||
}
|
|
||||||
|
|
||||||
showWebLog() {
|
|
||||||
return 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
numEntries() {
|
|
||||||
return sizeof(_thread);
|
|
||||||
}
|
|
||||||
|
|
||||||
// old stuff
|
|
||||||
|
|
||||||
#if 0
|
#if 0
|
||||||
_request_iterator(source, mc, data, vars, b) {
|
_request_iterator(source, mc, data, vars, b) {
|
||||||
|
|
|
@@ -4,8 +4,6 @@

 #define BLAME "!configuration"
 #define DONT_REWRITE_NICKS
-#define PLACE_HISTORY
-#define PLACE_OWNED
 #define HISTORY_GLIMPSE 12

 #include <uniform.h>
@@ -28,7 +26,7 @@ load(name, keep) {
     P3((">> userthreads:load(%O, %O)\n", name, keep))

     sscanf(name, "~%s#%s", owner, channel);
-    vSet("owners", ([ owner: 0 ]));
+    vSet("owners", ([ lower_case(owner) ]));
     vSet("privacy", "private");
     vSet("twitter", 0);
     vSet("identica", 0);
@@ -176,8 +174,9 @@ _request_identica(source, mc, data, vars, b) {
 }
 #endif

-addEntry(text, unick, thread) {
-    if (::addEntry(text, unick, thread)) {
+varargs int addEntry(mixed source, string snicker, string text, string title, int parent_id) {
+    int ret;
+    if (ret = ::addEntry(source, snicker, text, title, parent_id)) {
 #ifdef TWITTER
         if (v("twitter") && twitter) twitter->status_update(text);
 #endif
@@ -185,6 +184,7 @@ addEntry(text, unick, thread) {
         if (v("identica") && identica) identica->status_update(text);
 #endif
     }
+    return ret;
 }

 htMain(int limit, int offset) {
@@ -218,3 +218,9 @@ psycName() {
 pathName() {
     return regreplace(MYNICK, "#", "/", 1);
 }
+
+#ifdef _flag_save_userthreads_immediately
+qSaveImmediately() {
+    return 1;
+}
+#endif
@@ -46,7 +46,7 @@ void status_update(string text) {
     fetch(ua, api_base_url + "/statuses/update.json", "POST", (["status": text]));
 }

-#if 1 //not used, just an example
+#if 0 //not used, just an example
 void parse_home_timeline(string body, string headers, int http_status) {
     P3(("twitter/client:parse_home_timeline(%O, %O, %O)\n", body, headers, http_status))
 }
@@ -45,7 +45,7 @@ body.threads,
     margin: 44;
     width: 562;
 }
-.entry .title,
+.entry .header,
 .ldpc {
     background: #f33;
     color: black;
@@ -110,11 +110,11 @@ body.threads,
     width: 100%;
 }

-.entry .title a {
+.entry .header a {
     color: black;
 }
-.entry .title .author {}
-.entry .title .subject {}
+.entry .header .author {}
+.entry .header .title {}

 .entry .footer a,
 .entry .footer a:visited {