pWiki (mirror of https://github.com/flynx/pWiki.git, synced 2025-12-27 05:01:57 +00:00)

commit 128b4276f7 (parent 0d2739e796)

    cleanup and fixes...

    Signed-off-by: Alex A. Naanou <alex.nanou@gmail.com>
@@ -482,6 +482,35 @@ object.Mixin('ArrayProtoMixin', 'soft', {
 			this.constructor.zip(this, func, ...arrays)
 			: this.constructor.zip(func, this, ...arrays) },
+
+	// Insert new values between elements of an array
+	//
+	// 	.between(value)
+	// 		-> array
+	//
+	// 	.between(func)
+	// 		-> array
+	//
+	// 	func([a, b], from_index, to_index, array)
+	// 		-> elem
+	//
+	between: function(func){
+		var res = []
+		// NOTE: we skip the last element...
+		for(var i=0; i < this.length; i+=1){
+			var pair = new Array(2)
+			i in this ?
+				res.push(pair[0] = this[i])
+				: (res.length += 1)
+			if(i+1 >= this.length){
+				break }
+			i+1 in this
+				&& (pair[1] = this[i+1])
+			res.push(
+				typeof(func) == 'function' ?
+					func.call(this, pair, i, res.length, this)
+					: func) }
+		return res },
+
 	// get iterator over array...
 	//
 	// 	Array.iter()
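
For reference, a minimal standalone sketch of the documented .between(..)
semantics (a plain helper, not the ArrayProtoMixin method itself; the sample
arrays are made up):

    // sketch: insert a value (or a callback result) between elements...
    var between = function(arr, func){
    	var res = []
    	for(var i=0; i < arr.length; i++){
    		res.push(arr[i])
    		// no separator after the last element...
    		if(i+1 >= arr.length){
    			break }
    		res.push(
    			typeof(func) == 'function' ?
    				func([arr[i], arr[i+1]], i, res.length, arr)
    				: func) }
    	return res }

    console.log(between(['a', 'b', 'c'], '-'))
    // -> ['a', '-', 'b', '-', 'c']
    console.log(between([1, 2, 3], function(pair){ return pair[0] + pair[1] }))
    // -> [1, 3, 2, 5, 3]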
@@ -294,6 +294,22 @@ object.Constructor('IterablePromise', Promise, {
 				.then(function(){
 					return res }) },
+
+	// XXX BETWEEN...
+	between: function(func){
+		var i = 0
+		var j = 0
+		var prev
+		return this.constructor(this,
+			function(e){
+				return i++ > 0 ?
+					[
+						typeof(func) == 'function' ?
+							func.call(this, [prev, e], i, i + j++)
+							: func,
+						e,
+					]
+					: [e] }) },
+
 	// XXX .chain(..) -- see generator.chain(..)
 
 	flat: function(depth=1){
@@ -223,6 +223,15 @@ object.Mixin('GeneratorMixin', 'soft', {
 	reduce: makeGenerator('reduce'),
 	reduceRight: makeGenerator('reduceRight'),
+
+	between: makeGenerator('between'),
+
+	// XXX EXPERIMENTAL
+	// XXX add .toString(..) to this???
+	forEach: function(func){
+		var that = this
+		return function(){
+			return that(...arguments).forEach(func) } },
+
 	// non-generators...
 	//
 	toArray: function(){
@@ -397,6 +406,26 @@ object.Mixin('GeneratorProtoMixin', 'soft', {
 	greduce: function*(func, res){
 		yield this.reduce(...arguments) },
+
+	between: stoppable(function*(func){
+		var i = 0
+		var j = 0
+		var prev
+		for(var e of this){
+			if(i > 0){
+				yield typeof(func) == 'function' ?
+					func.call(this, [prev, e], i-1, i + j++, this)
+					: func }
+			prev = e
+			yield e
+			i++ } }),
+
+	// NOTE: this is a special case in that it will unwind the generator...
+	// NOTE: this is different from <array>.forEach(..) in that this will
+	// 		return the resulting array.
+	// XXX EXPERIMENTAL
+	forEach: function(func){
+		return [...this].map(func) },
+
 	pop: function(){
 		return [...this].pop() },
 	// XXX this needs the value to be iterable...
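
A standalone sketch of the streaming variant above: a separator is yielded
between consecutive elements while iterating, without unwinding the input
first (the helper name gbetween and the sample input are illustrative only):

    var gbetween = function*(iter, func){
    	var i = 0
    	var prev
    	for(var e of iter){
    		if(i > 0){
    			// yield the separator (or callback result) before the element...
    			yield typeof(func) == 'function' ?
    				func([prev, e], i-1)
    				: func }
    		prev = e
    		yield e
    		i++ } }

    console.log([...gbetween(['a', 'b', 'c'], '/')])
    // -> ['a', '/', 'b', '/', 'c']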
@@ -490,6 +519,9 @@ object.Mixin('AsyncGeneratorMixin', 'soft', {
 	map: makeGenerator('async', 'map'),
 	filter: makeGenerator('async', 'filter'),
 	reduce: makeGenerator('async', 'reduce'),
+
+	// XXX TEST...
+	between: makeGenerator('async', 'between'),
 })
 
 
 var AsyncGeneratorProtoMixin =
@@ -547,6 +579,21 @@ object.Mixin('AsyncGeneratorProtoMixin', 'soft', {
 					return [] })
 		return state },
+
+	// XXX BETWEEN...
+	between: async function*(func){
+		var i = 0
+		var j = 0
+		var prev
+		yield* this.iter(function(e){
+			return i++ > 0 ?
+				[
+					typeof(func) == 'function' ?
+						func.call(this, [prev, e], i, i + j++, this)
+						: func,
+					e,
+				]
+				: [e] }) },
+
 	// XXX TEST...
 	chain: async function*(...next){
 		yield* next
@@ -9,13 +9,13 @@
 
 var WIKIWORD_PATTERN =
 	RegExp('('+[
+		// /some/path | ./some/path | ../some/path | >>/some/path
+		'(?:^|\\s)(|\\.|\\.\\.|>>)[\\/\\\\][^\\s]+',
+		// [path]
+		'\\\\?\\[[^\\]]+\\]',
 		// WikiWord
 		//'\\\\?(\\/|\\./|\\.\\./|>>|[A-Z][_a-z0-9]+[A-Z/])[_a-zA-Z0-9/]*',
 		'\\\\?\\/?(\\./|\\.\\./|>>|[A-Z][_a-z0-9]+[A-Z/])[_a-zA-Z0-9/]*',
-		// [path]
-		'\\\\?\\[[^\\]]+\\]',
-		// /some/path | ./some/path | ../some/path | >>/some/path
-		'(?:^|\\s)(|\\.|\\.\\.|>>)[\\/\\\\][^\\s]+'
 	].join('|') +')', 'g')
 
 // XXX RENAME...
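
Since regex alternation picks the first alternative that can match at a given
position, moving the path forms ahead of the WikiWord form can change which
branch claims a link and how much of it is consumed. An illustrative sketch
using two of the alternatives above (the sample input is made up):

    var PATH = '(?:^|\\s)(|\\.|\\.\\.|>>)[\\/\\\\][^\\s]+'
    var WIKIWORD = '\\\\?\\/?(\\./|\\.\\./|>>|[A-Z][_a-z0-9]+[A-Z/])[_a-zA-Z0-9/]*'

    console.log('./a.b'.match(RegExp('('+ [PATH, WIKIWORD].join('|') +')', 'g')))
    // -> ['./a.b']    (path alternative wins)
    console.log('./a.b'.match(RegExp('('+ [WIKIWORD, PATH].join('|') +')', 'g')))
    // -> ['./a']      (WikiWord alternative stops at the '.')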
pwiki/page.js (110 lines changed)
@@ -25,11 +25,7 @@ var relProxy =
 function(name){
 	var func = function(path='.', ...args){
 		return this.store[name](
-			/* XXX RELATIVE
-			pwpath.relative(this.location+'/', path),
-			/*/
 			pwpath.relative(this.location, path),
-			//*/
 			...args) }
 	Object.defineProperty(func, 'name', {value: name})
 	return func }
@@ -40,11 +36,7 @@ function(name){
 			strict = path
 			path = '.' }
 		return this.store[name](
-			/* XXX RELATIVE
-			pwpath.relative(this.location+'/', path),
-			/*/
 			pwpath.relative(this.location, path),
-			//*/
 			strict) }
 	Object.defineProperty(func, 'name', {value: name})
 	return func }
@@ -227,11 +219,7 @@ object.Constructor('BasePage', {
 	__update__: function(data){
 		return this.store.update(this.location, data) },
 	__delete__: function(path='.'){
-		/* XXX RELATIVE
-		return this.store.delete(pwpath.relative(this.location+'/', path)) },
-		/*/
 		return this.store.delete(pwpath.relative(this.location, path)) },
-		//*/
 
 	// page data...
 	//
@@ -277,7 +265,20 @@ object.Constructor('BasePage', {
 
 	// relative proxies to store...
 	exists: relProxy('exists'),
+	//* XXX MATCH
 	match: relMatchProxy('match'),
+	/*/
+	match: async function(path='.', strict=false){
+		if(path === true || path === false){
+			strict = path
+			path = '.' }
+		path = pwpath.relative(this.location, path)
+		var res = await this.store.match(path, strict)
+		return res.length == 0 ?
+			// XXX are we going outside of match semantics here???
+			this.store.find(path)
+			: res },
+	//*/
 	resolve: relMatchProxy('resolve'),
 	delete: function(path='.'){
 		this.__delete__()
@@ -308,11 +309,7 @@ object.Constructor('BasePage', {
 			strict = path
 			path = '.' }
 		return this.store.find(
-			/* XXX RELATIVE
-			//pwpath.relative(this.location+'/', path), strict) },
-			/*/
 			pwpath.relative(this.location, path), strict) },
-			//*/
 
 	//
 	// .get(<path>[, <data>])
@@ -346,6 +343,10 @@ object.Constructor('BasePage', {
 			: paths instanceof Promise ?
 				await paths
 			: [paths]
+		// XXX MATCH
+		paths = paths.length == 0 ?
+			[await this.find(path)]
+			: paths
 
 		for(var path of paths){
 			yield this.get('/'+ path) } },
@@ -632,7 +633,6 @@ object.Constructor('Page', BasePage, {
 	// 	<filter> <filter-spec>
 	// 	| -<filter> <filter-spec>
 	//
-	//* XXX
 	filter: function(args, body, state, expand=true){
 		var that = this
 
@@ -685,12 +685,11 @@ object.Constructor('Page', BasePage, {
 	// 		<text>
 	// 	</include>
 	//
-	// XXX RECURSION recursion detection is still a bit off...
 	// XXX 'text' argument is changed to 'recursive'...
 	// XXX revise recursion checks....
 	// XXX should this be lazy???
 	include: Macro(
-		['src', 'recursive', ['isolated']],
+		['src', 'recursive', 'join', ['isolated']],
 		async function(args, body, state, key='included', handler){
 			var macro = 'include'
 			if(typeof(args) == 'string'){
@@ -698,13 +697,18 @@ object.Constructor('Page', BasePage, {
 				key = key ?? 'included' }
 			// positional args...
 			var src = args.src
-			var recursive = args.recursive || body
-			var isolated = args.isolated
+				&& await this.parse(args.src, state)
 
 			if(!src){
 				return }
+			var recursive = args.recursive || body
+			var isolated = args.isolated
+			var join = args.join
+				&& await this
+					// render join block relative to the path before the first '*'...
+					.get(this.path.split(/\*/).shift())
+					.parse(args.join, state)
 
 			// parse arg values...
-			src = await this.parse(src, state)
 			var base = this.get(src).path
 
 			handler = handler
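
A small illustration of the "path before the first '*'" expression used above
(the sample path is made up):

    console.log('/test/sort/*'.split(/\*/).shift())
    // -> '/test/sort/'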
@@ -715,7 +719,8 @@ object.Constructor('Page', BasePage, {
 					: this.get(src)
 						.parse(state) }
 
-			return this.get(src)
+			//return this.get(src)
+			var res = this.get(src)
 				.each()
 				.map(async function(page){
 					var full = page.path
@@ -740,7 +745,11 @@ object.Constructor('Page', BasePage, {
 					if(!parent_seen){
 						delete state.seen }
 
-					return res }) }),
+					return res })
+
+			return join ?
+				res.between(join)
+				: res }),
+	//*/
 	// NOTE: the main difference between this and @include is that
 	// 		this renders the src in the context of current page while
 	// 		include is rendered in the context of its page but with
@@ -772,6 +781,7 @@ object.Constructor('Page', BasePage, {
 	// 		not expanded...
 	// NOTE: the filter argument uses the same filters as @filter(..)
 	//
+	// XXX need to handle pattern paths (like include: join=...)
 	// XXX need a way to escape macros -- i.e. include </quote> in a quoted text...
 	quote: Macro(
 		['src', 'filter', 'text'],
@@ -995,16 +1005,16 @@ object.Constructor('Page', BasePage, {
 			var join_block = _getBlock('join')
 
 			// apply macro text...
-			return pages
+			var res = await pages
 				.map(function(page, i){
-					return [
-						that.__parser__.expand(page, text, state),
-						// weave in the join block...
-						...((join_block && i < pages.length-1) ?
-							[that.__parser__.expand(that, join_block, state)]
-							: []),
-					] })
-				.flat() } }),
+					return that.__parser__.expand(page, text, state) })
+			return join_block ?
+				res.between(await that.__parser__.expand(
+					// render join block relative to the path before the first '*'...
+					that.get(that.path.split(/\*/).shift()),
+					join_block,
+					state))
+				: res } }),
 
 	// nesting rules...
 	'else': ['macro'],
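
A self-contained sketch of what the weaving step above produces: the expanded
per-page texts with the expanded join block inserted between them (the values
are made up; the real code does the expansion asynchronously):

    var expanded = ['page A', 'page B', 'page C']
    var join_block = '<hr>'
    console.log(expanded
    	.flatMap(function(text, i){
    		return i < expanded.length-1 ?
    			[text, join_block]
    			: [text] }))
    // -> ['page A', '<hr>', 'page B', '<hr>', 'page C']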
@@ -1221,26 +1231,38 @@ module.System = {
 	// _list: {
 	// 	text: '<macro src="." join="\n">- @source(.)</macro>' },
 	//
+	// XXX all of these should support pattern pages...
 	_text: {
-		text: '<macro src="." join="\n">@include(. isolated)</macro>' },
-		// XXX this does not separate items when getting patterns...
-		//text: '@include(. isolated)' },
+		text: '@include(. isolated join="@source(file-separator)")' },
+		// XXX add join...
 	_raw: {
 		text: '@quote(.)' },
 
 	// XXX not sure if this is the right way to go...
 	_code: {
-		text: '<pre wikiwords="no"><quote filter="quote-tags" src="."/></pre>' },
+		text:
+			'<macro src="." join="@source(file-separator)">'
+				+'<pre wikiwords="no"><quote filter="quote-tags" src="."/></pre>'
+			+'</macro>'},
 	_ed: {
 	//_edit: {
 		text:
-			'<pre class="editor" '
-				+'wikiwords="no" '
-				+'contenteditable '
-				+'oninput="saveContent(\'@source(./path)\', this.innerText)">'
-			+'<quote filter="quote-tags" src="."/>'
-			+'</pre>' },
+			'<macro src="." join="@source(file-separator)">'
+			+'<pre class="editor" '
+				+'wikiwords="no" '
+				+'contenteditable '
+				+'oninput="saveContent(\'@source(./path)\', this.innerText)">'
+			+'<quote filter="quote-tags" src="."/>'
+			+'</pre>'
+			+'</macro>'},
+
+	paths: {
+		text: '<macro src="../*/path" join=" ">@source(.)</macro>' },
+
+	// page parts...
+	//
+	'line-separator': { text: '<br>' },
+	'file-separator': { text: '<hr>' },
 
 	// base system pages...
 	//
@@ -471,6 +471,7 @@ module.BaseParser = {
 		// NOTE: we need to await for ast here as we need stage 2 of
 		// 		parsing to happen AFTER everything else completes...
 		return await Promise.iter((await ast)
+			.flat()
 			// post handlers...
 			.map(function(section){
 				return typeof(section) == 'function' ?
@@ -145,8 +145,8 @@ module.BaseStore = {
 			pwpath.normalize(path, 'string')
 				.replace(/^\/|\/$/g, '')
 				.replace(/\//g, '\\/')
-				.replace(/\*\*/g, '.+')
-				.replace(/\*/g, '[^\\/]+')
+				.replace(/\*\*/g, '.*')
+				.replace(/\*/g, '[^\\/]*')
 			}(?=[\\\\\/]|$)`)
 		return [...(await this.paths())
 			// NOTE: we are not using .filter(..) here as wee
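
A minimal illustration of what the '+' to '*' change allows: a '*' (or '**')
segment in a path pattern may now also match an empty segment (the anchored
regexps here are just for the example, not the full pattern built above):

    console.log(RegExp('^\\/test\\/[^\\/]+$').test('/test/'))   // -> false
    console.log(RegExp('^\\/test\\/[^\\/]*$').test('/test/'))   // -> true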
@@ -58,6 +58,18 @@ pwiki.store.update('@pouch', {
 // XXX TEST...
 // XXX add filter tests...
 pwiki.pwiki
+	.update({
+		location: '/test/sort/*',
+		order: ['a', 'c', 'b'], })
+	.update({
+		location: '/test/sort/a',
+		text: 'a', })
+	.update({
+		location: '/test/sort/b',
+		text: 'b', })
+	.update({
+		location: '/test/sort/c',
+		text: 'c', })
 	.update({
 		location: '/test/comments',
 		text: object.doc`
@@ -114,6 +126,16 @@ pwiki.pwiki
 			</quote>
 			---
 		`, })
+	// XXX BUG: this prints '' for each <quote>
+	.update({
+		location: '/test/macro-quote',
+		text: object.doc`
+			Outside of macro:
+			<quote src="/test/a"/>
+
+			In macro:
+			<macro src="/test/*/path" join="\n">-- <quote src="/test/a"/></macro>
+		`, })
 	.update({
 		location: '/test/wikiword',
 		text: object.doc`
pwiki2.js (20 lines changed)

@@ -1,8 +1,22 @@
 /**********************************************************************
 *
 *
-* XXX BUG: comments seem to be broken -- see: /Doc/About
-* XXX BUG: browser: .get('/*').raw hangs...
+* XXX BUG: join block gets repeated three times per page...
+* 		await p.pwiki.get('/test/sort/*').text
+* 		essentially this is the culprit:
+* 			await p.pwiki.get('/test/sort/*').parse('@source(file-separator)')
+* XXX BUG: browser: .get('/*').raw hangs in the browser context...
+* XXX might be a good idea to add page caching (state.page_cache) relative
+* 		to a path on parsing, to avoid re-matching the same page over and
+* 		over again from the same context
+* 		format:
+* 			{
+* 				<basedir>: {
+* 					<path>: <data>,
+* 					...
+* 				},
+* 				...
+* 			}
 * XXX add action to reset overloaded (bootstrap) pages...
 * 		- per page
 * 		- global
@@ -21,7 +35,7 @@
 * 	- basic editor and interactivity -- DONE
 * 	- export
 * 		- json -- DONE
-* 		- zip
+* 		- zip (json/tree)
 * 	- migrate bootstrap
 * 	- store topology
 * 	- sync and sync conf