git ssb

0+

cel / ssb-wikimedia



Tree: a9397726c101b1c2984678dd09d09f3a57d1cfb5

Files: a9397726c101b1c2984678dd09d09f3a57d1cfb5 / bin.js

14806 bytesRaw
1#!/usr/bin/env node
2
3var fs = require('fs')
4var path = require('path')
5var URL = require('url')
6var http = require('http')
7var https = require('https')
8var crypto = require('crypto')
9var readline = require('readline')
10var os = require('os')
11
12var ssbClient = require('ssb-client')
13var pull = require('pull-stream')
14
15var pkg = require('./package')
16
17var userAgentBase = pkg.name + '/' + pkg.version
18var userAgentContact
19var userAgentBot = false
20
// Approximate the on-disk size of an SSB message that would carry `content`.
// The content is wrapped in a dummy message envelope whose placeholder key,
// previous-link, author, sequence, timestamp and signature have realistic
// lengths, and the pretty-printed JSON length of the whole thing is returned
// (matching how messages are serialized when the 8 KB limit is enforced).
function estimateMessageSize(content) {
  var placeholderMsgId = '%0000000000000000000000000000000000000000000=.sha256'
  var envelope = {
    key: placeholderMsgId,
    value: {
      previous: placeholderMsgId,
      author: '@0000000000000000000000000000000000000000000=.ed25519',
      sequence: 100000,
      timestamp: 1000000000000.0001,
      hash: 'sha256',
      content: content,
      signature: '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000==.sig.ed25519'
    }
  }
  return JSON.stringify(envelope, null, 2).length
}
36
// pull-stream transform: buffers every incoming item into `queue` and hands
// the growing queue to `fn` after each item; while `fn` returns truthy the
// stream keeps pulling more data (accumulating into the same queue).
// NOTE(review): when `fn` returns falsy, neither data nor end is ever
// delivered to `cb`, so the pipeline stalls, and the collected queue is
// never emitted downstream. This helper appears unused in this file —
// confirm intended semantics before wiring it into a pipeline.
function mapCollect(fn) {
  var aborted
  return function (read) {
    var queue = []
    return function (abort, cb) {
      // deliberate assignment: remember the abort reason, pass it upstream
      if (aborted = abort) return read(abort, cb)
      read(null, function next(end, data) {
        if (end) return cb(end)
        queue.push(data)
        var result = fn(queue)
        if (result) read(null, next)
      })
    }
  }
}
52
// GET `url` and parse the response body as JSON.
// Sends a descriptive User-Agent (per Wikimedia API etiquette) built from the
// package name/version plus the optional contact string and bot flag taken
// from ssb config. Calls cb(err) on network, HTTP-status or parse failure,
// cb(null, data) on success.
function getJson(url, cb) {
  var opts = URL.parse(url)
  opts.headers = {
    'User-Agent': userAgentBase
      + (userAgentContact ? ' (' + userAgentContact + ')' : '')
      + (userAgentBot ? ' bot' : '')
  }
  var h = opts.protocol === 'https:' ? https : http
  var req = h.get(opts, function (res) {
    if (res.statusCode !== 200) {
      // drain the body so the socket is released back to the agent
      res.resume()
      return cb(new Error('HTTP ' + res.statusCode + ' ' + res.statusMessage))
    }
    var bufs = []
    res.on('data', function (buf) {
      bufs.push(buf)
    })
    res.on('end', function () {
      // 'error' can no longer fire usefully once the body is complete
      res.removeListener('error', cb)
      var buf = Buffer.concat(bufs)
      bufs = null
      var data
      try {
        data = JSON.parse(buf.toString('utf8'))
      } catch (e) {
        return cb(e)
      }
      cb(null, data)
    })
    res.on('error', cb)
  })
  // Fix: without this, a connection/DNS error emitted on the request object
  // was an uncaught exception and crashed the process.
  req.on('error', cb)
}
82
// Publish an ordered list of draft messages through sbot, rewriting any
// draft-id strings found in later drafts' content to the real message ids
// assigned to earlier ones. Drafts are published strictly in order, so a
// draft may only reference drafts that precede it in the list.
// Calls cb(err) on failure, cb(null, publishedMsgs) on success.
function publishDrafts(sbot, drafts, cb) {
  var draftIdIndex = {}
  drafts.forEach(function (draft, i) {
    draftIdIndex[draft.draftId] = i
  })
  // Real message ids, indexed like `drafts`; filled in as publishing proceeds.
  var ids = []

  // Deep-copy obj, replacing any string that is a known draft id with the
  // message id it was published as. Throws ReferenceError for a draft id
  // whose message has not been published yet (forward/unknown reference).
  function replaceDraftIds(obj) {
    if (typeof obj === 'string') {
      var i = draftIdIndex[obj]
      if (typeof i === 'number') {
        var id = ids[i]
        if (!id) throw new ReferenceError('draft references unknown message')
        return id
      }
    } else if (Array.isArray(obj)) {
      return obj.map(replaceDraftIds)
    } else if (obj !== null && typeof obj === 'object') {
      var o = {}
      for (var k in obj) o[k] = replaceDraftIds(obj[k])
      return o
    }
    return obj
  }

  pull(
    pull.values(drafts),
    pull.asyncMap(function (draft, cb) {
      var content
      try {
        // Fix: replaceDraftIds can throw; report the error through the
        // stream callback instead of throwing inside the pull pipeline.
        content = replaceDraftIds(draft.content)
      } catch (e) {
        return cb(e)
      }
      sbot.publish(content, function (err, msg) {
        if (err) return cb(err)
        ids.push(msg.key)
        cb(null, msg)
      })
    }),
    pull.collect(cb)
  )
}
121
// Parse command-line flags: -n dry run (print drafts, publish nothing),
// -y publish without confirmation, -h print usage. Every non-flag argument
// is collected as a wiki page URL to sync.
var args = process.argv.slice(2)
var yes = false
var dry = false
var help = false
var urls = []
args.forEach(function (arg) {
  if (arg[0] === '-') switch (arg) {
    case '-n': return dry = true
    case '-y': return yes = true
    case '-h': return help = true
    // Fix: throw an Error object (not a bare string) so the failure
    // carries a stack trace and a proper name when it reaches the top.
    default: throw new Error('Unknown argument: ' + arg)
  } else urls.push(arg)
})

if (help) {
  process.stdout.write(fs.readFileSync(path.join(__dirname, 'usage.txt')))
  process.exit(0)
}
140
// Main driver: connect to the local sbot, resolve the pages to sync,
// normalize their titles against each wiki's API, pull new revisions since
// the last synced timestamp, stash page/user/content blobs, batch revisions
// into <8 KB draft messages, and (after confirmation) publish them.
ssbClient(function (err, sbot, config) {
  if (err) throw err
  var conf = config.wikimedia || {}
  userAgentContact = conf.contact
  userAgentBot = conf.bot

  if (urls.length === 0) {
    // No pages on the command line: read the tracked-pages file.
    var pagesFile = path.join(config.path, 'wikimedia-pages.txt')
    var pagesData = fs.readFileSync(pagesFile, 'utf8')
    // Keep non-empty lines that do not start with '#'.
    // Fix: the regex was /[^#]/, which matches any line containing a
    // non-'#' character anywhere, so "# comment" lines were NOT skipped.
    urls = pagesData.split('\n').filter(RegExp.prototype.test.bind(/^[^#]/))
    if (!urls.length) {
      console.log('No pages to sync.')
      return sbot.close()
    }
  }

  // Split each page URL into site root, api.php endpoint and page title.
  var pagesInfo = urls.map(function (page) {
    var m = /^(https?:\/\/.*?)(\/wiki)?\/(.*)$/.exec(page)
    // Fix: throw an Error object, not a bare string (stack trace, name).
    if (!m) throw new Error('Unable to parse page URL ' + page)
    return {
      site: m[1] + '/',
      api: m[1] + (m[2] ? '/w' : '/wiki') + '/api.php',
      title: m[3]
    }
  })
  // Group pages by API endpoint so each wiki is queried once for
  // title normalization.
  var pagesInfoByApi = {}
  pagesInfo.forEach(function (pageInfo) {
    var infos = pagesInfoByApi[pageInfo.api] || (pagesInfoByApi[pageInfo.api] = [])
    infos.push(pageInfo)
  })
  console.log('Normalizing titles...')
  var waiting = 0
  for (var api in pagesInfoByApi) (function (api) {
    var pagesInfoForApi = pagesInfoByApi[api]
    var pagesInfoForApiByTitle = {}
    var titles = pagesInfoForApi.map(function (info) {
      pagesInfoForApiByTitle[info.title] = info
      return info.title
    })
    // \x1f is the API's alternative multi-value separator (allows titles
    // that themselves contain '|').
    var url = api + '?format=json&action=query' +
      '&titles=' + encodeURIComponent('\x1f' + titles.join('\x1f')) +
      '&' // trailing & needed for some reason
    waiting++
    getJson(url, function (err, data) {
      if (err) throw err
      if (data.warnings) console.trace('Warnings:', data.warnings)
      // Record the server's canonical form of each title so later queries
      // and published messages agree with the wiki.
      if (data.query.normalized) data.query.normalized.forEach(function (norm) {
        var info = pagesInfoForApiByTitle[norm.from]
        if (!info) {
          console.error(JSON.stringify({titles: titles, response: data}, 0, 2))
          throw new Error('Unexpected title in server response')
        }
        info.title = norm.to
      })
      // Proceed only after every wiki has answered.
      if (!--waiting) next()
    })
  }(api))

  function next() {
    console.log('Getting revisions...')
    // Cache of user-page blob ids, keyed by username.
    var userHashes = {}
    pull(
      pull.values(pagesInfo),
      pull.asyncMap(function (pageInfo, cb) {
        // Calculate blob id for page URL + title, for linking
        pull(
          pull.once(pageInfo.site + '\t' + pageInfo.title),
          sbot.blobs.add(function (err, hash) {
            pageInfo.hash = hash
            cb(null, pageInfo)
          })
        )
      }),
      pull.asyncMap(function (pageInfo, cb) {
        // Get previous messages for this page.
        // Simple solution: find the revision with latest timestamp.
        var maxRevTs = ''
        var maxRevMsgId
        pull(
          sbot.links({
            dest: pageInfo.hash,
            rel: 'pageId',
            values: true,
            meta: false
          }),
          pull.filter(function (msg) {
            var c = msg && msg.value && msg.value.content
            return c
              && c.type === 'wikimedia/revisions'
              && c.site === pageInfo.site
              && c.title === pageInfo.title
          }),
          pull.drain(function (msg) {
            var c = msg && msg.value && msg.value.content
            var revs = Array.isArray(c.revisions) && c.revisions
            if (revs) revs.forEach(function (rev) {
              if (rev && rev.timestamp > maxRevTs) {
                maxRevTs = rev.timestamp
                // Fix: was `maxRevMsgId == msg.key` (a comparison whose
                // result was discarded), so the latest message id was
                // never recorded and new messages never linked to their
                // predecessor.
                maxRevMsgId = msg.key
              }
            })
          }, function (err) {
            if (err) return cb(err)
            pageInfo.latestMsgId = maxRevMsgId
            pageInfo.latestRevTs = maxRevTs
            cb(null, pageInfo)
          })
        )
      }),
      pull.map(function (pageInfo) {
        // Get new revisions since the newest one already published.
        var rvcontinue, rvdone
        var rvstart = pageInfo.latestRevTs
        var prevId = pageInfo.latestMsgId
        var aborted
        var revisions = pull(
          // Source: one API request per pull, following rvcontinue paging.
          function (abort, cb) {
            if (aborted = abort) return cb(abort)
            if (rvdone) return cb(true)
            console.log('Getting revisions for', pageInfo.title + '...',
              rvstart || '', rvcontinue || '')
            // Fix: this used the leaked `var api` loop variable (always
            // the last endpoint enumerated above), which broke syncing
            // pages from more than one wiki; use this page's endpoint.
            var url = pageInfo.api + '?format=json&action=query&prop=revisions&rvslots=*'
              + '&titles=' + encodeURIComponent(pageInfo.title)
              + '&rvprop=ids|timestamp|comment|user|sha1|size|slotsha1|slotsize|content|roles|flags|tags'
              + '&rvdir=newer'
              + (rvcontinue ? '&rvcontinue=' + rvcontinue : '')
              + (rvstart ? '&rvstart=' + rvstart : '')
              + '&rvlimit=50'
            getJson(url, function (err, data) {
              if (aborted) return err && console.trace(err)
              if (err) return cb(err)

              // Suppress the known warnings an older MediaWiki (without
              // revision slots) emits for the slot-related parameters.
              var warnings = data.warnings
              if (warnings) {
                if (warnings.main) {
                  if (warnings.main['*'] === 'Unrecognized parameter: rvslots.') {
                    delete warnings.main['*']
                    if (Object.keys(warnings.main).length === 0) {
                      delete warnings.main
                    }
                  }
                }
                if (warnings.revisions) {
                  if (warnings.revisions['*'] === 'Unrecognized values for parameter "rvprop": slotsha1, slotsize, roles.') {
                    delete warnings.revisions['*']
                    if (Object.keys(warnings.revisions).length === 0) {
                      delete warnings.revisions
                    }
                  }
                }
                if (Object.keys(warnings).length > 0) {
                  console.trace('Warnings:', warnings)
                }
              }

              rvcontinue = data.continue && data.continue.rvcontinue
              if (!rvcontinue) rvdone = true
              // Find our page in the keyed-by-pageid response map.
              var page
              if (data.query) for (var pageid in data.query.pages) {
                page = data.query.pages[pageid]
                if (page.title === pageInfo.title) break
                else page = null
              }
              if (!page) {
                console.trace(data.query.pages, pageInfo)
                return cb(new Error('Unable to find page'))
              }
              var revs = page.revisions || []
              console.log('Got ' + revs.length + ' revisions')
              cb(null, revs)
            })
          },
          pull.flatten(),

          pull.through(function (rev) {
            if (!rev.slots) {
              // old API does not use slots.
              // Transform result to be forward-compatible.
              rev.slots = {
                main: {
                  size: rev.size,
                  sha1: rev.sha1,
                  contentmodel: rev.contentmodel,
                  contentformat: rev.contentformat,
                  '*': rev['*']
                }
              }
              delete rev.contentmodel
              delete rev.contentformat
              delete rev['*']
            }
            // duplicate values supplied in new API in slotsize and slotsha1
            delete rev.sha1
            delete rev.size
          }),

          pull.asyncMap(function (rev, cb) {
            // Calculate blob id for user page URL + title, for linking
            var hash = userHashes[rev.user]
            if (hash) {
              rev.userId = hash
              return cb(null, rev)
            }
            pull(
              pull.once(pageInfo.site + '\tUser:' + rev.user),
              sbot.blobs.add(function (err, hash) {
                rev.userId = userHashes[rev.user] = hash
                cb(null, rev)
              })
            )
          }),
          pull.asyncMap(function (rev, cb) {
            // Move each slot's wikitext into a blob, keeping only the link.
            // NOTE(review): on a bad slot this can invoke cb twice (error
            // now, success when the remaining blob adds finish) — confirm
            // whether a done-guard is wanted here.
            var waiting = 0
            for (var slot in rev.slots) (function (slot) {
              waiting++
              var slotInfo = rev.slots[slot]
              var content = slotInfo['*']
              if (!content) {
                console.trace(slotInfo)
                return cb(new Error('Missing content'))
              }
              // Verify integrity of the received content before storing it.
              var sha1 = crypto.createHash('sha1').update(content).digest('hex')
              if (sha1 !== slotInfo.sha1) {
                console.trace(slotInfo, sha1)
                return cb(new Error('Mismatched content sha1'))
              }
              pull(
                pull.once(content),
                sbot.blobs.add(function (err, hash) {
                  if (err) return cb(err)
                  slotInfo.link = hash
                  delete slotInfo['*']
                  if (!--waiting) cb(null, rev)
                })
              )
            }(slot))
          })
        )

        // Batch the revision stream into draft messages, each kept under
        // the ~8 KB SSB message size limit.
        var queuedRevisions = []
        var ended
        function cbDraft(content, cb) {
          if (!content.revisions.length) {
            console.log('No revisions for', pageInfo.title)
            return cb(true) // pull-stream end
          }
          console.log('Prepared a message',
            'with', content.revisions.length, 'revisions',
            'for', pageInfo.title)
          // The draft id doubles as the parent link for the next batch.
          prevId = '%' + crypto.createHash('sha256').update(JSON.stringify(content)).digest('base64') + '.draft6'
          cb(null, {
            draftId: prevId,
            content: content
          })
        }
        return function (abort, cb) {
          if (abort) return revisions(abort, cb)
          if (ended) return cb(true)
          var content = {
            type: 'wikimedia/revisions',
            site: pageInfo.site,
            title: pageInfo.title,
            pageId: pageInfo.hash,
            parents: prevId ? [prevId] : undefined,
            revisions: queuedRevisions.splice(0)
          }
          revisions(null, function next(end, revision) {
            if (ended = end) return cbDraft(content, cb)
            content.revisions.push(revision)
            if (estimateMessageSize(content) > 8192) {
              // Over the limit: push the overflowing revision back for the
              // next message and emit this one.
              queuedRevisions.push(content.revisions.pop())
              return cbDraft(content, cb)
            }
            revisions(null, next)
          })
        }
      }),
      pull.flatten(),
      pull.collect(function (err, drafts) {
        if (err) throw err
        if (dry) {
          console.log(JSON.stringify(drafts, 0, 2))
          return sbot.close()
        }
        if (!drafts.length) {
          console.log('No messages to publish.')
          return sbot.close()
        }
        if (yes) return confirmed(true)
        var rl = readline.createInterface({
          input: process.stdin,
          output: process.stdout
        })
        rl.question('Publish ' + drafts.length + ' messages? [Y/n] ', function (answer) {
          rl.close()
          confirmed(!/^n/i.test(answer))
        })
        function confirmed(yes) {
          if (!yes) return sbot.close()
          publishDrafts(sbot, drafts, function (err, msgs) {
            if (err) throw err
            // Fix: `.join('\n')` was called on the map callback function
            // itself (`}.join(...)`), which threw a TypeError instead of
            // printing the published ids; it belongs on the map result.
            console.log('Published:\n' + msgs.map(function (msg) {
              return msg.key
            }).join('\n'))
            sbot.close()
          })
        }
      })
    )
  }
})
454

Built with git-ssb-web