update to mapbox 0.0.11 master

parent 10b81aa2c8
commit 6dc65cc991

.gitignore (vendored, 4 changes)
@@ -1,2 +1,6 @@
/node_modules
*.swp
.DS_Store
test/rendering/layers/
test/rendering/cache/
test/rendering-mss/npm-debug.log
.travis.yml

@@ -1,3 +1,6 @@
language: node_js

node_js:
  - 0.6
  - "0.11"
  - "0.10"
  - "0.8"
CHANGELOG.md (77 changes)
@@ -1,8 +1,81 @@
## Changelog

## XXXX
## 0.11.1-dev-browser
* Added a `rectangle` option for `marker-type`

## 0.11.0

* Switch the API to be synchronous. All errors should be caught using try/catch now; see the sketch below.

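A minimal sketch of the new calling convention (it mirrors what `bin/carto` does in this commit; the stylesheet file name is hypothetical):

    var fs = require('fs');
    var carto = require('carto');

    var renderer = new carto.Renderer({ filename: 'style.mss' });
    try {
        // render()/renderMSS() now return the XML string directly...
        var xml = renderer.renderMSS(fs.readFileSync('style.mss', 'utf-8'));
        console.log(xml);
    } catch (err) {
        // ...and report failures by throwing instead of via a callback.
        console.error(err.stack || err);
        process.exit(1);
    }
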
## 0.10.0

* Remove the automatic inclusion of `maximum-extent` on the Map element to allow geometries that are buffered past extent bounds (e.g. the dateline).
* Bump `mapnik-reference` dependency to ~5.0.9 (with `shield-halo-rasterizer`)

## 0.9.6

* Fixed support for `text-face-name` values with `&` like `El&Font Bubble`
* Fixed support for filtering on fields containing single quotes. Now `#layer[name="it's"] { ... }` is possible.
* Fixed support for filtering on fields containing `&`. Now `#layer["Hello&Goodbye"="yes"] { ... }` is possible.
* Added support for exponential notation in filters. Now `#layer[value = 1.2e3] { ... }` is possible.
* Bump `mapnik-reference` dependency to ~5.0.8 (with support for Mapnik v2.3.0 and 3.x)

## 0.9.5

* Various speed optimizations to help address #20 (#231)
* Fixed support for fields that contain the word `zoom` in them (previously clashed with the `zoom` keyword)
* Fixed support for a space in front of the `zoom` keyword (#288)
* Improved error messages when color functions encounter an invalid color (#309)
* The `carto` command line tool now exits cleanly when millstone is used
* The `carto` command line tool now only localizes with millstone if requested (#243)
* Added a man page for `carto` (#257)
* Fix repeated comments in selectors. Fixes #260
* Fixed `image-filter` duplication (#270)
* Quote all needed XML chars. See #263.
* Added higher tolerance for various characters in field names (#230)
* Bump `mapnik-reference` dependency to ~5.0.7 (with support for Mapnik v2.2.0)
* Adds compatibility with screen units.
* Fixed ability to use carto as a global module (#236)
* Now using `console` instead of `util` for `stderr` (#217)

## 0.9.4

* Fixes nesting of regex calls

## 0.9.3

* Allows `text-face-name` properties to be unquoted
* Detects inline Format XML tags in `text-name` and passes such output straight to XML for advanced text names.
* Fixes bugs around concatenation of strings in expressions
* Fixes parsing of comments in between selectors
* Fixes parsing of whitespace in calls
* Improved error messages for unknown properties: advises the user on the property name most closely matching the incorrect input.
* Improved errors for calls: advises the user on the number of arguments
* Fixes instance inheritance - thanks @gravitystorm!

## 0.9.2

Tagged Sept 6, 2012

* Bump `mapnik-reference` dependency to ~5.0.0
* Better support for unsigned types in certain Mapnik styling properties

## 0.9.1

Tagged Aug 15, 2012

* Improved error handling for different target `mapnik-reference` versions (strk)
* Bump `mapnik-reference` dependency to ~4.0.3
* Fixed handling of image-filter syntax as per [Mapnik changes](https://github.com/mapnik/mapnik/issues/1384)

## 0.9.0

* Bump `mapnik-reference` dependency to ~4.0.0 to pull in new properties.
* Adapted to `comp-op` rename upstream in `mapnik-reference`.
* Adapted to `transform` rename upstream in `mapnik-reference` and Mapnik.

## 0.8.1

* Bump `mapnik-reference` dependency to ~3.1.0 to pull in new properties.

@@ -10,7 +83,7 @@

## 0.8.0

* Adds the modulus operator `%` as an option
* Adds a new field-type like `[FIELD]` instead of "[FIELD"
* Adds a new field-type like `[FIELD]` instead of "[FIELD]"
* Supports function syntax for transforms, optionally with variables and arguments.

### 0.7.1
DEVELOPING.md (new file, 34 changes)
@@ -0,0 +1,34 @@
## Developing

Installing:

    git clone git@github.com:mapbox/carto.git
    npm install

Test:

    npm test

Running the head binary:

    ./bin/carto

## Documentation

This repository contains auto-generated documentation of the content of Carto
that's published on Mapbox.com.

    git fetch origin gh-pages:gh-pages

Edit `_docs/package.json` to point to the head version of [mapnik-reference](https://github.com/mapnik/mapnik-reference).

    cd _docs
    npm install
    node generate.js

Then go up a directory and run the testing server:

    cd ../
    jekyll serve -p 4000

Test the new site at `localhost:4000/carto`; if things look good, `git add` your changes and push.
Makefile (1 change)
@@ -3,6 +3,7 @@
#

expresso = ./node_modules/.bin/mocha
<<<<<<< HEAD
docco = ./node_modules/.bin/docco
uglify = ./node_modules/.bin/uglify

bin/carto (217 changes)
@@ -2,51 +2,40 @@

var path = require('path'),
    fs = require('fs'),
    util = require('util'),
    carto = require('carto');
    carto = require('../lib/carto'),
    url = require('url'),
    _ = require('underscore');

var args = process.argv.slice(1);
var options = { nosymlink:false };
var existsSync = require('fs').existsSync || require('path').existsSync

args = args.filter(function (arg) {
    var match;
var optimist = require('optimist')
    .usage("Usage: $0 <source MML file>")
    .options('h', {alias:'help', describe:'Display this help message'})
    .options('v', {alias:'version', boolean:true, describe:'Display version information'})
    .options('b', {alias:'benchmark', boolean:true, describe:'Outputs total compile time'})
    .options('l', {alias:'localize', boolean:true, default:false, describe:'Use millstone to localize resources when loading an MML'})
    .options('n', {alias:'nosymlink', boolean:true, describe:'Use absolute paths instead of symlinking files'})
    .options('ppi', {describe:'Pixels per inch used to convert m, mm, cm, in, pt, pc to pixels', default:90.714});

    if (match = arg.match(/^--?([a-z][0-9a-z-]*)$/i)) { arg = match[1] }
    else { return arg }
var options = optimist.argv;

    switch (arg) {
        case 'v':
        case 'version':
            util.puts("carto " + carto.version.join('.') + " (Carto map stylesheet compiler)");
            process.exit(0);
            break;
        case 'b':
        case 'benchmark':
            options.benchmark = true;
            break;
        case 'n':
        case 'nosymlink':
            options.nosymlink = true;
            break;
if (options.help) {
    optimist.showHelp();
    process.exit(0);
}

        default:
            sys.puts("Usage: carto <source MML file>");
            sys.puts("Options:");
            sys.puts("  -v --version    Parse JSON map manifest");
            sys.puts("  -b --benchmark  Outputs total compile time");
            sys.puts("  -n --nosymlink  Use absolute paths instead of symlinking files");
            process.exit(0);
            break;
    }
});
if (options.version) {
    console.log("carto " + carto.version.join('.') + " (Carto map stylesheet compiler)");
    process.exit(0);
}

var input = args[1];
var input = options._[0];
if (input && input[0] != '/') {
    input = path.join(process.cwd(), input);
}

if (!input) {
    util.puts("carto: no input files");
    console.log("carto: no input files ('carto -h or --help' for help)");
    process.exit(1);
}

@@ -54,62 +43,120 @@ if (options.benchmark) {
    var start = +new Date;
}

var ext = path.extname(input);

if (!ext) {
    console.log("carto: please pass either a .mml file or .mss file");
    process.exit(1);
}

if (!existsSync(input)) {
    console.log("carto: file does not exist: '" + input + "'");
    process.exit(1);
}

function compileMML(err, data) {
    // force drain the millstone download pool now
    // to ensure we can exit without waiting
    if (options.localize && millstone.drainPool) {
        millstone.drainPool(function() {});
    }
    if (err) {
        console.error(err);
        process.exit(1);
    }
    var renderer = new carto.Renderer({
        filename: input,
        benchmark: options.benchmark,
        ppi: options.ppi
    });
    try {
        var output = renderer.render(data);
    } catch (e) {
        if (e.stack) {
            console.error(e.stack);
        } else {
            console.error(e);
        }
        process.exit(1);
    }
    if (!options.benchmark) {
        console.log(output);
    } else {
        var duration = (+new Date) - start;
        console.log('TOTAL: ' + (duration) + 'ms');
    }
};

function compileMSS(err, data) {
    if (err) {
        console.error(err);
        process.exit(1);
    }
    var renderer = new carto.Renderer({
        filename: path.basename(input),
        benchmark: options.benchmark,
        ppi: options.ppi
    });
    try {
        var output = renderer.renderMSS(data);
    } catch (e) {
        if (e.stack) {
            console.error(e.stack);
        } else {
            console.error(e);
        }
        process.exit(1);
    }
    if (!options.benchmark) {
        console.log(output);
    } else {
        var duration = (+new Date) - start;
        console.log('TOTAL: ' + (duration) + 'ms');
    }
};

try {
    var data = fs.readFileSync(input, 'utf-8');
} catch(err) {
    util.puts("carto: " + err.message.replace(/^[A-Z]+, /, ''));
    console.error("carto: " + err.message.replace(/^[A-Z]+, /, ''));
    process.exit(1);
}

try {
    data = JSON.parse(data);
} catch(err) {
    util.puts("carto: " + err.message.replace(/^[A-Z]+, /, ''));
    process.exit(1);
}

var millstone = undefined;

try {
    require.resolve('millstone');
    millstone = require('millstone');
} catch (err) {
    sys.puts('carto: Millstone not found. ' + err.message.replace(/^[A-Z]+, /, ''));
    process.exit(1);
}

millstone.resolve({
    mml: data,
    base: path.dirname(input),
    cache: path.join(path.dirname(input), 'cache'),
    nosymlink: options.nosymlink
}, compile);

function compile(err, data) {
    if (err) throw err;
    if (ext == '.mml') {
        try {
            new carto.Renderer({
                filename: input,
                benchmark: options.benchmark
            }).render(data, function(err, output) {
                if (err) {
                    console.log(err);
                    throw err;
                    process.exit(1);
                } else {
                    if (!options.benchmark) {
                        util.puts(output);
                    } else {
                        var duration = (+new Date) - start;
                        console.log('TOTAL: ' + (duration) + 'ms');
                    }
                }
            });
        } catch (e) {
            if (e.stack) {
                util.error(e.stack);
            } else {
                util.error(e);
            }
        data = JSON.parse(data);
    } catch(err) {
        console.error("carto: " + err.message.replace(/^[A-Z]+, /, ''));
        process.exit(1);
    }
};

    if (options.localize) {
        var millstone = undefined;
        try {
            require.resolve('millstone');
            millstone = require('millstone');
        } catch (err) {
            console.error('carto: Millstone not found, required if localizing stylesheet resources. ' + err.message.replace(/^[A-Z]+, /, ''));
            process.exit(1);
        }
        millstone.resolve({
            mml: data,
            base: path.dirname(input),
            cache: path.join(path.dirname(input), 'cache'),
            nosymlink: options.nosymlink
        }, compileMML);
    } else {
        data.Stylesheet = data.Stylesheet.map(function(x) {
            if (typeof x !== 'string') {
                return { id: x, data: x.data }
            }
            return { id: x, data: fs.readFileSync(path.join(path.dirname(input), x), 'utf8') }
        });
        compileMML(null,data);
    }
} else if (ext == '.mss') {
    compileMSS(null,data);
} else {
    console.log("carto: please pass either a .mml file or .mss file");
}
@@ -25,17 +25,20 @@ fs.readFile(process.argv[2], 'utf-8', function(err, data) {
});

function addAttributes(obj) {
    if (obj['@']) for (var key in obj['@']) obj[key] = obj['@'][key];
    delete obj['@'];
    if (obj['$']) for (var key in obj['$']) obj[key] = obj['$'][key];
    delete obj['$'];
    return obj;
}


function simplifyExternal(obj) {
    if (obj.src) return obj.src;
    else return obj;
}

var parser = new xml2js.Parser();
var parser = new xml2js.Parser({
    explicitRoot: false,
    explicitArray: false
});
parser.addListener('end', function(json) {
    console.log(JSON.stringify(json, function(key, value) {
        if (!key) {
@@ -52,7 +55,7 @@ fs.readFile(process.argv[2], 'utf-8', function(err, data) {
        else if (key === 'Datasource') {
            value = addAttributes(value);
            value.Parameter.forEach(function(parameter) {
                value[parameter['@'].name] = parameter['#'];
                value[parameter['$'].name] = parameter['_'];
            });
            delete value.Parameter;
            return value;
@@ -63,4 +66,4 @@ fs.readFile(process.argv[2], 'utf-8', function(err, data) {
    }, 4));
});
parser.parseString(data);
});
});
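A hedged sketch of what the xml2js options above change (illustrative only, not part of the commit; the sample XML is made up): with xml2js >= 0.2 the parser reports attributes under `$` and element text under `_`, which is why the Datasource handling in the hunk above now reads `parameter['$'].name` and `parameter['_']`.

    var xml2js = require('xml2js');

    var parser = new xml2js.Parser({ explicitRoot: false, explicitArray: false });
    parser.parseString(
        '<Map><Datasource><Parameter name="type">shape</Parameter></Datasource></Map>',
        function(err, json) {
            if (err) throw err;
            // With these options the single root is unwrapped and single children
            // are not forced into arrays, so:
            //   json.Datasource.Parameter.$.name === 'type'
            //   json.Datasource.Parameter._      === 'shape'
            console.log(JSON.stringify(json, null, 2));
        });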
|
||||
|
@ -264,11 +264,11 @@
|
||||
</dict>
|
||||
</dict>
|
||||
<key>match</key>
|
||||
<string>\b(background-color|background-image|srs|buffer|font-directory|polygon-fill|polygon-gamma|polygon-opacity|polygon-meta-output|polygon-meta-writer|line-color|line-width|line-opacity|line-join|line-cap|line-gamma|line-dasharray|line-meta-output|line-meta-writer|marker-file|marker-opacity|marker-line-color|marker-line-width|marker-line-opacity|marker-placement|marker-type|marker-width|marker-height|marker-fill|marker-allow-overlap|marker-spacing|marker-max-error|marker-transform|marker-meta-output|marker-meta-writer|shield-name|shield-face-name|shield-size|shield-fill|shield-min-distance|shield-halo-fill|shield-halo-radius|shield-spacing|shield-character-spacing|shield-line-spacing|shield-file|shield-width|shield-height|shield-type|shield-text-dx|shield-text-dy|shield-dx|shield-dy|shield-meta-output|shield-meta-writer|line-pattern-file|line-pattern-width|line-pattern-height|line-pattern-type|line-pattern-meta-output|line-pattern-meta-writer|polygon-pattern-file|polygon-pattern-width|polygon-pattern-height|polygon-pattern-type|polygon-pattern-meta-output|polygon-pattern-meta-writer|raster-opacity|raster-mode|raster-scaling|point-file|point-width|point-height|point-type|point-allow-overlap|point-placement|point-meta-output|point-meta-writer|text-name|text-face-name|text-size|text-ratio|text-wrap-width|text-spacing|text-character-spacing|text-line-spacing|text-label-position-tolerance|text-max-char-angle-delta|text-fill|text-halo-fill|text-halo-radius|text-dx|text-dy|text-avoid-edges|text-min-distance|text-min-padding|text-allow-overlap|text-placement|text-placement-type|text-placements|text-transform|text-meta-output|text-meta-writer|building-fill|building-fill-opacity|building-height)\s*:</string>
|
||||
<string>\b(background-color|background-image|srs|buffer|font-directory|polygon-fill|polygon-gamma|polygon-opacity|polygon-meta-output|polygon-meta-writer|line-color|line-width|line-opacity|line-join|line-cap|line-gamma|line-dasharray|line-meta-output|line-meta-writer|marker-file|marker-opacity|marker-line-color|marker-line-width|marker-line-opacity|marker-placement|marker-type|marker-width|marker-height|marker-fill|marker-allow-overlap|marker-spacing|marker-max-error|marker-transform|marker-meta-output|marker-meta-writer|shield-name|shield-face-name|shield-size|shield-fill|shield-min-distance|shield-halo-fill|shield-halo-radius|shield-spacing|shield-character-spacing|shield-line-spacing|shield-file|shield-width|shield-height|shield-type|shield-text-dx|shield-text-dy|shield-dx|shield-dy|shield-meta-output|shield-meta-writer|line-pattern-file|line-pattern-width|line-pattern-height|line-pattern-type|line-pattern-meta-output|line-pattern-meta-writer|polygon-pattern-file|polygon-pattern-width|polygon-pattern-height|polygon-pattern-type|polygon-pattern-meta-output|polygon-pattern-meta-writer|raster-opacity|raster-comp-op|raster-scaling|point-file|point-width|point-height|point-type|point-allow-overlap|point-placement|point-meta-output|point-meta-writer|text-name|text-face-name|text-size|text-ratio|text-wrap-width|text-spacing|text-character-spacing|text-line-spacing|text-label-position-tolerance|text-max-char-angle-delta|text-fill|text-halo-fill|text-halo-radius|text-dx|text-dy|text-avoid-edges|text-min-distance|text-min-padding|text-allow-overlap|text-placement|text-placement-type|text-placements|text-transform|text-meta-output|text-meta-writer|building-fill|building-fill-opacity|building-height)\s*:</string>
|
||||
</dict>
|
||||
<dict>
|
||||
<key>match</key>
|
||||
<string>\b(miter|round|bevel|butt|round|square|point|line|arrow|ellipse|png|jpg|svg|normal|grain_merge|grain_merge2|multiply|multiply2|divide|divide2|screen|hard_light|fast|bilinear|bilinear8|centroid|interior|point|line|vertex|interior|none|uppercase|lowercase)\b</string>
|
||||
<string>\b(miter|round|bevel|butt|round|square|point|line|arrow|ellipse|png|jpg|svg|normal|grain_merge|grain_merge2|multiply|multiply2|divide|divide2|screen|hard_light|near|bilinear|bilinear8|centroid|interior|point|line|vertex|interior|none|uppercase|lowercase)\b</string>
|
||||
<key>name</key>
|
||||
<string>meta.property-value.carto</string>
|
||||
</dict>
|
||||
|
@ -2,8 +2,7 @@
|
||||
|
||||
var path = require('path'),
|
||||
fs = require('fs'),
|
||||
_ = require('underscore')._,
|
||||
sys = require('sys');
|
||||
_ = require('underscore')._;
|
||||
|
||||
var carto = require('../lib/carto');
|
||||
|
||||
|
@ -47,7 +47,7 @@ syn region cartoFontDescriptorFunction contained matchgroup=cartoFunctionName st
|
||||
syn match cartoUnicodeRange contained "U+[0-9A-Fa-f?]\+"
|
||||
syn match cartoUnicodeRange contained "U+\x\+-\x\+"
|
||||
|
||||
syn match cartoKeywordAttr "/\|miter\|round\|bevel\|butt\|round\|square\|point\|line\|arrow\|ellipse\|point\|line\|vertex\|interior\|local\|global\|normal\|grain_merge\|grain_merge2\|multiply\|multiply2\|divide\|divide2\|screen\|hard_light\|fast\|bilinear\|bilinear8\|bicubic\|spline16\|gaussian\|lanczos\|centroid\|interior\|top\|middle\|bottom\|point\|line\|vertex\|interior\|dummy\|simple\|none\|uppercase\|lowercase\|capitalize\|/"
|
||||
syn match cartoKeywordAttr "/\|miter\|round\|bevel\|butt\|round\|square\|point\|line\|arrow\|ellipse\|point\|line\|vertex\|interior\|local\|global\|normal\|grain_merge\|grain_merge2\|multiply\|multiply2\|divide\|divide2\|screen\|hard_light\|near\|bilinear\|bilinear8\|bicubic\|spline16\|gaussian\|lanczos\|centroid\|interior\|top\|middle\|bottom\|point\|line\|vertex\|interior\|dummy\|simple\|none\|uppercase\|lowercase\|capitalize\|/"
|
||||
|
||||
" syn keyword cartoColor contained {{#colors}}{{.}} {{/colors}}
|
||||
syn match cartoColor "/\|aliceblue\|antiquewhite\|aqua\|aquamarine\|azure\|beige\|bisque\|black\|blanchedalmond\|blue\|blueviolet\|brown\|burlywood\|cadetblue\|chartreuse\|chocolate\|coral\|cornflowerblue\|cornsilk\|crimson\|cyan\|darkblue\|darkcyan\|darkgoldenrod\|darkgray\|darkgreen\|darkgrey\|darkkhaki\|darkmagenta\|darkolivegreen\|darkorange\|darkorchid\|darkred\|darksalmon\|darkseagreen\|darkslateblue\|darkslategrey\|darkturquoise\|darkviolet\|deeppink\|deepskyblue\|dimgray\|dimgrey\|dodgerblue\|firebrick\|floralwhite\|forestgreen\|fuchsia\|gainsboro\|ghostwhite\|gold\|goldenrod\|gray\|grey\|green\|greenyellow\|honeydew\|hotpink\|indianred\|indigo\|ivory\|khaki\|lavender\|lavenderblush\|lawngreen\|lemonchiffon\|lightblue\|lightcoral\|lightcyan\|lightgoldenrodyellow\|lightgray\|lightgreen\|lightgrey\|lightpink\|lightsalmon\|lightseagreen\|lightskyblue\|lightslategray\|lightslategrey\|lightsteelblue\|lightyellow\|lime\|limegreen\|linen\|magenta\|maroon\|mediumaquamarine\|mediumblue\|mediumorchid\|mediumpurple\|mediumseagreen\|mediumslateblue\|mediumspringgreen\|mediumturquoise\|mediumvioletred\|midnightblue\|mintcream\|mistyrose\|moccasin\|navajowhite\|navy\|oldlace\|olive\|olivedrab\|orange\|orangered\|orchid\|palegoldenrod\|palegreen\|paleturquoise\|palevioletred\|papayawhip\|peachpuff\|peru\|pink\|plum\|powderblue\|purple\|red\|rosybrown\|royalblue\|saddlebrown\|salmon\|sandybrown\|seagreen\|seashell\|sienna\|silver\|skyblue\|slateblue\|slategray\|slategrey\|snow\|springgreen\|steelblue\|tan\|teal\|thistle\|tomato\|turquoise\|violet\|wheat\|white\|whitesmoke\|yellow\|yellowgreen\|transparent\|/"
|
||||
|
@@ -7,17 +7,38 @@ tree.functions = {
    rgba: function (r, g, b, a) {
        var rgb = [r, g, b].map(function (c) { return number(c); });
        a = number(a);
        if (rgb.some(isNaN) || isNaN(a)) return null;
        return new tree.Color(rgb, a);
    },
    // Only require val
    stop: function (val) {
        var color, mode;
        if (arguments.length > 1) color = arguments[1];
        if (arguments.length > 2) mode = arguments[2];

        return {
            is: 'tag',
            val: val,
            color: color,
            mode: mode,
            toString: function(env) {
                return '\n\t<stop value="' + val.ev(env) + '"' +
                    (color ? ' color="' + color.ev(env) + '" ' : '') +
                    (mode ? ' mode="' + mode.ev(env) + '" ' : '') +
                    '/>';
            }
        };
    },
    hsl: function (h, s, l) {
        return this.hsla(h, s, l, 1.0);
    },
    hsla: function (h, s, l, a) {
        h = (number(h) % 360) / 360;
        s = number(s); l = number(l); a = number(a);
        if ([h, s, l, a].some(isNaN)) return null;

        var m2 = l <= 0.5 ? l * (s + 1) : l + s - l * s;
        var m1 = l * 2 - m2;
        var m2 = l <= 0.5 ? l * (s + 1) : l + s - l * s,
            m1 = l * 2 - m2;

        return this.rgba(hue(h + 1/3) * 255,
            hue(h) * 255,
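The m1/m2 values above are the standard CSS3 HSL-to-RGB intermediates. A standalone sketch of the full conversion for reference (illustrative only; the file's own hue() helper is not shown in this hunk, so it is reimplemented here):

    // Standard CSS3 hsl-to-rgb, written out so the m1/m2 step above is easy to check.
    function hslToRgb(h, s, l) {
        h = (h % 360) / 360;
        var m2 = l <= 0.5 ? l * (s + 1) : l + s - l * s,
            m1 = l * 2 - m2;
        function hue(x) {
            x = x < 0 ? x + 1 : (x > 1 ? x - 1 : x);
            if (x * 6 < 1) return m1 + (m2 - m1) * x * 6;
            if (x * 2 < 1) return m2;
            if (x * 3 < 2) return m1 + (m2 - m1) * (2 / 3 - x) * 6;
            return m1;
        }
        return [hue(h + 1 / 3), hue(h), hue(h - 1 / 3)].map(function(c) {
            return Math.round(c * 255);
        });
    }

    // Worked example: hsl(120, 1, 0.5) gives m2 = 0.5 * 2 = 1 and m1 = 0,
    // so the result is [0, 255, 0], i.e. pure green.
    console.log(hslToRgb(120, 1, 0.5));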
@ -33,18 +54,23 @@ tree.functions = {
|
||||
}
|
||||
},
|
||||
hue: function (color) {
|
||||
if (!('toHSL' in color)) return null;
|
||||
return new tree.Dimension(Math.round(color.toHSL().h));
|
||||
},
|
||||
saturation: function (color) {
|
||||
if (!('toHSL' in color)) return null;
|
||||
return new tree.Dimension(Math.round(color.toHSL().s * 100), '%');
|
||||
},
|
||||
lightness: function (color) {
|
||||
if (!('toHSL' in color)) return null;
|
||||
return new tree.Dimension(Math.round(color.toHSL().l * 100), '%');
|
||||
},
|
||||
alpha: function (color) {
|
||||
if (!('toHSL' in color)) return null;
|
||||
return new tree.Dimension(color.toHSL().a);
|
||||
},
|
||||
saturate: function (color, amount) {
|
||||
if (!('toHSL' in color)) return null;
|
||||
var hsl = color.toHSL();
|
||||
|
||||
hsl.s += amount.value / 100;
|
||||
@ -52,6 +78,7 @@ tree.functions = {
|
||||
return hsla(hsl);
|
||||
},
|
||||
desaturate: function (color, amount) {
|
||||
if (!('toHSL' in color)) return null;
|
||||
var hsl = color.toHSL();
|
||||
|
||||
hsl.s -= amount.value / 100;
|
||||
@ -59,6 +86,7 @@ tree.functions = {
|
||||
return hsla(hsl);
|
||||
},
|
||||
lighten: function (color, amount) {
|
||||
if (!('toHSL' in color)) return null;
|
||||
var hsl = color.toHSL();
|
||||
|
||||
hsl.l += amount.value / 100;
|
||||
@ -66,6 +94,7 @@ tree.functions = {
|
||||
return hsla(hsl);
|
||||
},
|
||||
darken: function (color, amount) {
|
||||
if (!('toHSL' in color)) return null;
|
||||
var hsl = color.toHSL();
|
||||
|
||||
hsl.l -= amount.value / 100;
|
||||
@ -73,6 +102,7 @@ tree.functions = {
|
||||
return hsla(hsl);
|
||||
},
|
||||
fadein: function (color, amount) {
|
||||
if (!('toHSL' in color)) return null;
|
||||
var hsl = color.toHSL();
|
||||
|
||||
hsl.a += amount.value / 100;
|
||||
@ -80,6 +110,7 @@ tree.functions = {
|
||||
return hsla(hsl);
|
||||
},
|
||||
fadeout: function (color, amount) {
|
||||
if (!('toHSL' in color)) return null;
|
||||
var hsl = color.toHSL();
|
||||
|
||||
hsl.a -= amount.value / 100;
|
||||
@ -87,6 +118,7 @@ tree.functions = {
|
||||
return hsla(hsl);
|
||||
},
|
||||
spin: function (color, amount) {
|
||||
if (!('toHSL' in color)) return null;
|
||||
var hsl = color.toHSL();
|
||||
var hue = (hsl.h + amount.value) % 360;
|
||||
|
||||
@ -142,7 +174,7 @@ var image_filter_functors = [
|
||||
'x-gradient', 'y-gradient', 'sharpen'];
|
||||
|
||||
for (var i = 0; i < image_filter_functors.length; i++) {
|
||||
var f = image_filter_functors[i];
|
||||
var f = image_filter_functors[i];
|
||||
tree.functions[f] = (function(f) {
|
||||
return function() {
|
||||
return new tree.ImageFilter(f);
|
||||
@ -154,8 +186,12 @@ tree.functions['agg-stack-blur'] = function(x, y) {
|
||||
return new tree.ImageFilter('agg-stack-blur', [x, y]);
|
||||
};
|
||||
|
||||
function hsla(hsla) {
|
||||
return tree.functions.hsla(hsla.h, hsla.s, hsla.l, hsla.a);
|
||||
tree.functions['scale-hsla'] = function(h0,h1,s0,s1,l0,l1,a0,a1) {
|
||||
return new tree.ImageFilter('scale-hsla', [h0,h1,s0,s1,l0,l1,a0,a1]);
|
||||
};
|
||||
|
||||
function hsla(h) {
|
||||
return tree.functions.hsla(h.h, h.s, h.l, h.a);
|
||||
}
|
||||
|
||||
function number(n) {
|
||||
@ -164,7 +200,7 @@ function number(n) {
|
||||
} else if (typeof(n) === 'number') {
|
||||
return n;
|
||||
} else {
|
||||
throw new Error('Color functions take numbers as parameters.');
|
||||
return NaN;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
var util = require('util');
|
||||
|
||||
[ 'call', 'color', 'comment', 'definition', 'dimension',
|
||||
'directive', 'element', 'expression', 'filterset', 'filter', 'field',
|
||||
'element', 'expression', 'filterset', 'filter', 'field',
|
||||
'keyword', 'layer', 'literal', 'operation', 'quoted', 'imagefilter',
|
||||
'reference', 'rule', 'ruleset', 'selector', 'style', 'url', 'value',
|
||||
'variable', 'zoom', 'invalid', 'fontset', 'frame_offset'
|
||||
@ -9,8 +9,13 @@ var util = require('util');
|
||||
require('./tree/' + n);
|
||||
});
|
||||
|
||||
|
||||
function getVersion() {
|
||||
return '0.0.1-browser';
|
||||
}
|
||||
|
||||
var carto = {
|
||||
version: [0, 4, 7],
|
||||
version: getVersion(),
|
||||
Parser: require('./parser').Parser,
|
||||
Renderer: require('./renderer').Renderer,
|
||||
tree: require('./tree'),
|
||||
@ -49,7 +54,7 @@ var carto = {
|
||||
error = options.indent + error.join('\n' + options.indent) + '\033[0m\n';
|
||||
|
||||
message = options.indent + message + stylize(ctx.message, 'red');
|
||||
ctx.filename && (message += stylize(' in ', 'red') + ctx.filename);
|
||||
if (ctx.filename) (message += stylize(' in ', 'red') + ctx.filename);
|
||||
|
||||
util.error(message, error);
|
||||
|
||||
@ -80,4 +85,3 @@ function stylize(str, style) {
|
||||
return '\033[' + styles[style][0] + 'm' + str +
|
||||
'\033[' + styles[style][1] + 'm';
|
||||
}
|
||||
|
||||
|
@ -40,7 +40,6 @@ if (typeof(exports) !== 'undefined') {
|
||||
// Token matching is done with the `$` function, which either takes
|
||||
// a terminal string or regexp, or a non-terminal function to call.
|
||||
// It also takes care of moving all the indices forwards.
|
||||
|
||||
carto.Parser = function Parser(env) {
|
||||
var input, // LeSS input string
|
||||
i, // current index in `input`
|
||||
@ -58,29 +57,6 @@ carto.Parser = function Parser(env) {
|
||||
// have been imported through `@import`.
|
||||
var finish = function() {};
|
||||
|
||||
var imports = this.imports = {
|
||||
paths: env && env.paths || [], // Search paths, when importing
|
||||
queue: [], // Files which haven't been imported yet
|
||||
files: {}, // Holds the imported parse trees
|
||||
mime: env && env.mime, // MIME type of .carto files
|
||||
push: function(path, callback) {
|
||||
var that = this;
|
||||
this.queue.push(path);
|
||||
|
||||
//
|
||||
// Import a file asynchronously
|
||||
//
|
||||
carto.Parser.importer(path, this.paths, function(root) {
|
||||
that.queue.splice(that.queue.indexOf(path), 1); // Remove the path from the queue
|
||||
that.files[path] = root; // Store the root
|
||||
|
||||
callback(root);
|
||||
|
||||
if (that.queue.length === 0) { finish(); } // Call `finish` if we're done importing
|
||||
}, env);
|
||||
}
|
||||
};
|
||||
|
||||
function save() {
|
||||
temp = chunks[j];
|
||||
memo = i;
|
||||
@ -104,17 +80,12 @@ carto.Parser = function Parser(env) {
|
||||
function $(tok) {
|
||||
var match, args, length, c, index, endIndex, k;
|
||||
|
||||
//
|
||||
// Non-terminal
|
||||
//
|
||||
if (tok instanceof Function) {
|
||||
return tok.call(parser.parsers);
|
||||
//
|
||||
// Terminal
|
||||
//
|
||||
// Either match a single character in the input,
|
||||
// or match a regexp in the current chunk (chunk[j]).
|
||||
//
|
||||
// Either match a single character in the input,
|
||||
// or match a regexp in the current chunk (chunk[j]).
|
||||
} else if (typeof(tok) === 'string') {
|
||||
match = input.charAt(i) === tok ? tok : null;
|
||||
length = 1;
|
||||
@ -162,11 +133,7 @@ carto.Parser = function Parser(env) {
|
||||
if (typeof(tok) === 'string') {
|
||||
return input.charAt(i) === tok;
|
||||
} else {
|
||||
if (tok.test(chunks[j])) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
return !!tok.test(chunks[j]);
|
||||
}
|
||||
}
|
||||
|
||||
@ -210,9 +177,8 @@ carto.Parser = function Parser(env) {
|
||||
this.env.inputs = this.env.inputs || {};
|
||||
|
||||
// The Parser
|
||||
return parser = {
|
||||
parser = {
|
||||
|
||||
imports: imports,
|
||||
extractErrorLine: extractErrorLine,
|
||||
//
|
||||
// Parse an input string into an abstract syntax tree.
|
||||
@ -228,17 +194,19 @@ carto.Parser = function Parser(env) {
|
||||
}
|
||||
|
||||
var early_exit = false;
|
||||
|
||||
// Split the input into chunks.
|
||||
chunks = (function(chunks) {
|
||||
chunks = (function (chunks) {
|
||||
var j = 0,
|
||||
skip = /[^"'`\{\}\/]+/g,
|
||||
skip = /(?:@\{[\w-]+\}|[^"'`\{\}\/\(\)\\])+/g,
|
||||
comment = /\/\*(?:[^*]|\*+[^\/*])*\*+\/|\/\/.*/g,
|
||||
string = /"((?:[^"\\\r\n]|\\.)*)"|'((?:[^'\\\r\n]|\\.)*)'|`((?:[^`]|\\.)*)`/g,
|
||||
level = 0,
|
||||
match,
|
||||
chunk = chunks[0],
|
||||
inString;
|
||||
inParam;
|
||||
|
||||
chunker: for (var i = 0, c, cc; i < input.length; i++) {
|
||||
for (var i = 0, c, cc; i < input.length;) {
|
||||
skip.lastIndex = i;
|
||||
if (match = skip.exec(input)) {
|
||||
if (match.index === i) {
|
||||
@ -247,53 +215,58 @@ carto.Parser = function Parser(env) {
|
||||
}
|
||||
}
|
||||
c = input.charAt(i);
|
||||
comment.lastIndex = i;
|
||||
comment.lastIndex = string.lastIndex = i;
|
||||
|
||||
if (!inString && c === '/') {
|
||||
if (match = string.exec(input)) {
|
||||
if (match.index === i) {
|
||||
i += match[0].length;
|
||||
chunk.push(match[0]);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
if (!inParam && c === '/') {
|
||||
cc = input.charAt(i + 1);
|
||||
if (cc === '/' || cc === '*') {
|
||||
if (match = comment.exec(input)) {
|
||||
if (match.index === i) {
|
||||
i += match[0].length - 1;
|
||||
i += match[0].length;
|
||||
chunk.push(match[0]);
|
||||
c = input.charAt(i);
|
||||
continue chunker;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (c === '{' && !inString) { level++;
|
||||
chunk.push(c);
|
||||
} else if (c === '}' && !inString) { level--;
|
||||
chunk.push(c);
|
||||
chunks[++j] = chunk = [];
|
||||
} else {
|
||||
if (c === '"' || c === "'" || c === '`') {
|
||||
if (! inString) {
|
||||
inString = c;
|
||||
} else {
|
||||
inString = inString === c ? false : inString;
|
||||
}
|
||||
}
|
||||
chunk.push(c);
|
||||
switch (c) {
|
||||
case '{': if (! inParam) { level ++; chunk.push(c); break; }
|
||||
case '}': if (! inParam) { level --; chunk.push(c); chunks[++j] = chunk = []; break; }
|
||||
case '(': if (! inParam) { inParam = true; chunk.push(c); break; }
|
||||
case ')': if ( inParam) { inParam = false; chunk.push(c); break; }
|
||||
default: chunk.push(c);
|
||||
}
|
||||
|
||||
i++;
|
||||
}
|
||||
if (level > 0) {
|
||||
// TODO: make invalid instead
|
||||
throw makeError({
|
||||
message: 'Missing closing `}`',
|
||||
index: i
|
||||
});
|
||||
if (level !== 0) {
|
||||
error = {
|
||||
index: i - 1,
|
||||
type: 'Parse',
|
||||
message: (level > 0) ? "missing closing `}`" : "missing opening `{`"
|
||||
};
|
||||
}
|
||||
|
||||
return chunks.map(function(c) { return c.join(''); });
|
||||
return chunks.map(function (c) { return c.join(''); });
|
||||
})([[]]);
|
||||
|
||||
if (error) {
|
||||
throw makeError(error);
|
||||
}
|
||||
|
||||
// Start with the primary rule.
|
||||
// The whole syntax tree is held under a Ruleset node,
|
||||
// with the `root` property set to true, so no `{}` are
|
||||
// output. The callback is called when the input is parsed.
|
||||
// output.
|
||||
root = new tree.Ruleset([], $(this.parsers.primary));
|
||||
root.root = true;
|
||||
|
||||
@ -312,6 +285,7 @@ carto.Parser = function Parser(env) {
|
||||
};
|
||||
env.frames = env.frames || [];
|
||||
|
||||
|
||||
// call populates Invalid-caused errors
|
||||
var definitions = this.flatten([], [], env);
|
||||
definitions.sort(specificitySort);
|
||||
@ -336,23 +310,9 @@ carto.Parser = function Parser(env) {
|
||||
return bs[3] - as[3];
|
||||
};
|
||||
|
||||
// If `i` is smaller than the `input.length - 1`,
|
||||
// it means the parser wasn't able to parse the whole
|
||||
// string, so we've got a parsing error.
|
||||
//
|
||||
// We try to extract a \n delimited string,
|
||||
// showing the line where the parse error occurred.
|
||||
// We split it up into two parts (the part which parsed,
|
||||
// and the part which didn't), so we can color them differently.
|
||||
if (i < input.length - 1) throw makeError({
|
||||
message:'Parse error.',
|
||||
index:i
|
||||
});
|
||||
|
||||
return root;
|
||||
},
|
||||
|
||||
//
|
||||
// Here in, the parsing rules/functions
|
||||
//
|
||||
// The basic structure of the syntax tree generated is as follows:
|
||||
@ -362,9 +322,7 @@ carto.Parser = function Parser(env) {
|
||||
// In general, most rules will try to parse a token with the `$()` function, and if the return
|
||||
// value is truly, will return a new node, of the relevant type. Sometimes, we need to check
|
||||
// first, before parsing, that's when we use `peek()`.
|
||||
//
|
||||
parsers: {
|
||||
//
|
||||
// The `primary` rule is the *entry* and *exit* point of the parser.
|
||||
// The rules here can appear at any level of the parse tree.
|
||||
//
|
||||
@ -378,14 +336,13 @@ carto.Parser = function Parser(env) {
|
||||
//
|
||||
// Only at one point is the primary rule not called from the
|
||||
// block rule: at the root level.
|
||||
//
|
||||
primary: function() {
|
||||
var node, root = [];
|
||||
|
||||
while ((node = $(this.rule) || $(this.ruleset) ||
|
||||
$(this.comment)) ||
|
||||
$(/^[\s\n]+/) || (node = $(this.invalid))) {
|
||||
node && root.push(node);
|
||||
if (node) root.push(node);
|
||||
}
|
||||
return root;
|
||||
},
|
||||
@ -395,7 +352,7 @@ carto.Parser = function Parser(env) {
|
||||
|
||||
// To fail gracefully, match everything until a semicolon or linebreak.
|
||||
if (chunk) {
|
||||
return new(tree.Invalid)(chunk, memo);
|
||||
return new tree.Invalid(chunk, memo);
|
||||
}
|
||||
},
|
||||
|
||||
@ -414,15 +371,10 @@ carto.Parser = function Parser(env) {
|
||||
}
|
||||
},
|
||||
|
||||
//
|
||||
// Entities are tokens which can be found inside an Expression
|
||||
//
|
||||
entities: {
|
||||
//
|
||||
// A string, which supports escaping " and '
|
||||
//
|
||||
// "milky way" 'he\'s the one!'
|
||||
//
|
||||
|
||||
// A string, which supports escaping " and ' "milky way" 'he\'s the one!'
|
||||
quoted: function() {
|
||||
if (input.charAt(i) !== '"' && input.charAt(i) !== "'") return;
|
||||
var str = $(/^"((?:[^"\\\r\n]|\\.)*)"|'((?:[^'\\\r\n]|\\.)*)'/);
|
||||
@ -431,15 +383,12 @@ carto.Parser = function Parser(env) {
|
||||
}
|
||||
},
|
||||
|
||||
// A reference to a Mapnik field, like
|
||||
//
|
||||
// [NAME]
|
||||
//
|
||||
// A reference to a Mapnik field, like [NAME]
|
||||
// Behind the scenes, this has the same representation, but Carto
|
||||
// needs to be careful to warn when unsupported operations are used.
|
||||
field: function() {
|
||||
if (! $('[')) return;
|
||||
var field_name = $(/(^[a-zA-Z0-9\-_]+)/);
|
||||
var field_name = $(/(^[^\]]+)/);
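// Note (added, not in the original source): the widened pattern above accepts
// any character up to the closing ']', so fields such as [name:en] or
// [PLACE NAME] (examples only) now parse; the old /^[a-zA-Z0-9\-_]+/ form
// stopped at the first ':' or space. This lines up with the CHANGELOG 0.9.5
// note about higher tolerance for characters in field names (#230).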
|
||||
if (! $(']')) return;
|
||||
if (field_name) return new tree.Field(field_name[1]);
|
||||
},
|
||||
@ -452,41 +401,38 @@ carto.Parser = function Parser(env) {
|
||||
}
|
||||
},
|
||||
|
||||
// A catch-all word, such as:
|
||||
//
|
||||
// hard-light
|
||||
//
|
||||
// These can start with either a letter or a dash (-),
|
||||
// and then contain numbers, underscores, and letters.
|
||||
// A catch-all word, such as: hard-light
|
||||
// These can start with either a letter or a dash (-),
|
||||
// and then contain numbers, underscores, and letters.
|
||||
keyword: function() {
|
||||
var k = $(/^[A-Za-z-]+[A-Za-z-0-9_]*/);
|
||||
if (k) { return new tree.Keyword(k); }
|
||||
},
|
||||
|
||||
// A function call
|
||||
//
|
||||
// rgb(255, 0, 255)
|
||||
//
|
||||
// A function call like rgb(255, 0, 255)
|
||||
// The arguments are parsed with the `entities.arguments` parser.
|
||||
call: function() {
|
||||
var name, args;
|
||||
|
||||
if (! (name = /^([\w\-]+|%)\(/.exec(chunks[j]))) return;
|
||||
if (!(name = /^([\w\-]+|%)\(/.exec(chunks[j]))) return;
|
||||
|
||||
name = name[1].toLowerCase();
|
||||
name = name[1];
|
||||
|
||||
if (name === 'url') {
|
||||
// url() is handled by the url parser instead
|
||||
return null;
|
||||
} else {
|
||||
i += name.length + 1;
|
||||
i += name.length;
|
||||
}
|
||||
|
||||
args = $(this.entities.arguments);
|
||||
$('('); // Parse the '(' and consume whitespace.
|
||||
|
||||
args = $(this.entities['arguments']);
|
||||
|
||||
if (!$(')')) return;
|
||||
|
||||
if (name) {
|
||||
return new tree.Call(name, args, i);
|
||||
return new tree.Call(name, args, i);
|
||||
}
|
||||
},
|
||||
// Arguments are comma-separated expressions
|
||||
@ -502,8 +448,9 @@ carto.Parser = function Parser(env) {
|
||||
},
|
||||
literal: function() {
|
||||
return $(this.entities.dimension) ||
|
||||
$(this.entities.color) ||
|
||||
$(this.entities.quoted);
|
||||
$(this.entities.keywordcolor) ||
|
||||
$(this.entities.hexcolor) ||
|
||||
$(this.entities.quoted);
|
||||
},
|
||||
|
||||
// Parse url() tokens
|
||||
@ -520,8 +467,9 @@ carto.Parser = function Parser(env) {
|
||||
if (! $(')')) {
|
||||
return new tree.Invalid(value, memo, 'Missing closing ) in URL.');
|
||||
} else {
|
||||
return new tree.URL((value.value || value instanceof tree.Variable) ?
|
||||
value : new tree.Quoted(value), imports.paths);
|
||||
return new tree.URL((typeof value.value !== 'undefined' ||
|
||||
value instanceof tree.Variable) ?
|
||||
value : new tree.Quoted(value));
|
||||
}
|
||||
},
|
||||
|
||||
@ -539,41 +487,34 @@ carto.Parser = function Parser(env) {
|
||||
}
|
||||
},
|
||||
|
||||
// A Hexadecimal color
|
||||
//
|
||||
// #4F3C2F
|
||||
//
|
||||
// `rgb` and `hsl` colors are parsed through the `entities.call` parser.
|
||||
color: function() {
|
||||
hexcolor: function() {
|
||||
var rgb;
|
||||
|
||||
if (input.charAt(i) === '#' && (rgb = $(/^#([a-fA-F0-9]{6}|[a-fA-F0-9]{3})/))) {
|
||||
return new tree.Color(rgb[1]);
|
||||
} else {
|
||||
rgb = chunks[j].match(/^[a-z]+/);
|
||||
if (rgb && rgb[0] in tree.Reference.data.colors) {
|
||||
return new tree.Color(tree.Reference.data.colors[$(/^[a-z]+/)]);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
keywordcolor: function() {
|
||||
var rgb = chunks[j].match(/^[a-z]+/);
|
||||
if (rgb && rgb[0] in tree.Reference.data.colors) {
|
||||
return new tree.Color(tree.Reference.data.colors[$(/^[a-z]+/)]);
|
||||
}
|
||||
},
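// Example (added, not in the original source): after this split, `#4f3c2f`
// is matched by `hexcolor` above, a named color such as `steelblue` is
// resolved by `keywordcolor` via tree.Reference.data.colors, and `rgb()` /
// `hsl()` still arrive through the `call` parser.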
|
||||
|
||||
// A Dimension, that is, a number and a unit. The only
|
||||
// unit that has an effect is %
|
||||
//
|
||||
// 0.5em 95%
|
||||
dimension: function() {
|
||||
var c = input.charCodeAt(i);
|
||||
if ((c > 57 || c < 45) || c === 47) return;
|
||||
var value = $(/^(-?\d*\.?\d+)(\%|\w+)?/);
|
||||
var value = $(/^(-?\d*\.?\d+(?:[eE][-+]?\d+)?)(\%|\w+)?/);
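// Example (added, not in the original source): with the exponent-aware
// pattern above, "1.2e3" matches as value[1] === "1.2e3" with no unit,
// which is what makes filters such as #layer[value = 1.2e3] { ... }
// possible (see the CHANGELOG 0.9.6 entry on exponential notation).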
|
||||
if (value) {
|
||||
return new tree.Dimension(value[1], value[2], memo);
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
// The variable part of a variable definition. Used in the `rule` parser
|
||||
//
|
||||
// @fink:
|
||||
// The variable part of a variable definition.
|
||||
// Used in the `rule` parser. Like @fink:
|
||||
variable: function() {
|
||||
var name;
|
||||
|
||||
@ -582,24 +523,20 @@ carto.Parser = function Parser(env) {
|
||||
}
|
||||
},
|
||||
|
||||
//
|
||||
// Entities are the smallest recognized token,
|
||||
// and can be found inside a rule's value.
|
||||
//
|
||||
entity: function() {
|
||||
return $(this.entities.literal) ||
|
||||
return $(this.entities.call) ||
|
||||
$(this.entities.literal) ||
|
||||
$(this.entities.field) ||
|
||||
$(this.entities.variable) ||
|
||||
$(this.entities.url) ||
|
||||
$(this.entities.call) ||
|
||||
$(this.entities.keyword);
|
||||
},
|
||||
|
||||
//
|
||||
// A Rule terminator. Note that we use `peek()` to check for '}',
|
||||
// because the `block` rule will be expecting it, but we still need to make sure
|
||||
// it's there, if ';' was omitted.
|
||||
//
|
||||
end: function() {
|
||||
return $(';') || peek('}');
|
||||
},
|
||||
@ -620,12 +557,12 @@ carto.Parser = function Parser(env) {
|
||||
|
||||
// Selectors are made out of one or more Elements, see above.
|
||||
selector: function() {
|
||||
var a, attachment;
|
||||
var e, elements = [];
|
||||
var f, filters = new tree.Filterset();
|
||||
var z, zoom = tree.Zoom.all;
|
||||
var fo, frame_offset = tree.FrameOffset.none;
|
||||
var segments = 0, conditions = 0;
|
||||
var a, attachment,
|
||||
e, elements = [],
|
||||
f, filters = new tree.Filterset(),
|
||||
z, zooms = [],
|
||||
frame_offset = tree.FrameOffset.none;
|
||||
segments = 0, conditions = 0;
|
||||
|
||||
while (
|
||||
(e = $(this.element)) ||
|
||||
@ -638,18 +575,24 @@ carto.Parser = function Parser(env) {
|
||||
if (e) {
|
||||
elements.push(e);
|
||||
} else if (z) {
|
||||
zoom &= z;
|
||||
zooms.push(z);
|
||||
conditions++;
|
||||
} else if (fo) {
|
||||
frame_offset = fo;
|
||||
conditions++;
|
||||
} else if (f) {
|
||||
filters.add(f);
|
||||
var err = filters.add(f);
|
||||
if (err) {
|
||||
throw makeError({
|
||||
message: err,
|
||||
index: i - 1
|
||||
});
|
||||
}
|
||||
conditions++;
|
||||
} else if (attachment) {
|
||||
throw makeError({
|
||||
message:'Encountered second attachment name.',
|
||||
index:i - 1
|
||||
message: 'Encountered second attachment name.',
|
||||
index: i - 1
|
||||
});
|
||||
} else {
|
||||
attachment = a;
|
||||
@ -660,7 +603,7 @@ carto.Parser = function Parser(env) {
|
||||
}
|
||||
|
||||
if (segments) {
|
||||
return new tree.Selector(filters, zoom, frame_offset, elements, attachment, conditions, memo);
|
||||
return new tree.Selector(filters, zooms, frame_offset, elements, attachment, conditions, memo);
|
||||
}
|
||||
},
|
||||
|
||||
@ -668,10 +611,28 @@ carto.Parser = function Parser(env) {
|
||||
save();
|
||||
var key, op, val;
|
||||
if (! $('[')) return;
|
||||
if (key = $(/^[a-zA-Z0-9\-_]+/) || $(this.entities.quoted) || $(this.entities.variable)) {
|
||||
if (key = $(/^[a-zA-Z0-9\-_]+/) ||
|
||||
$(this.entities.quoted) ||
|
||||
$(this.entities.variable) ||
|
||||
$(this.entities.keyword) ||
|
||||
$(this.entities.field)) {
|
||||
// TODO: remove at 1.0.0
|
||||
if (key instanceof tree.Quoted) {
|
||||
key = new tree.Field(key.toString());
|
||||
}
|
||||
if ((op = $(this.entities.comparison)) &&
|
||||
(val = $(this.entities.quoted) || $(this.entities.variable) || $(/^[\w\-\.]+/))) {
|
||||
if (! $(']')) return;
|
||||
(val = $(this.entities.quoted) ||
|
||||
$(this.entities.variable) ||
|
||||
$(this.entities.dimension) ||
|
||||
$(this.entities.keyword) ||
|
||||
$(this.entities.field))) {
|
||||
if (! $(']')) {
|
||||
throw makeError({
|
||||
message: 'Missing closing ] of filter.',
|
||||
index: memo - 1
|
||||
});
|
||||
}
|
||||
if (!key.is) key = new tree.Field(key);
|
||||
return new tree.Filter(key, op, val, memo, env.filename);
|
||||
}
|
||||
}
|
||||
@ -693,16 +654,16 @@ carto.Parser = function Parser(env) {
|
||||
var op, val;
|
||||
if ($(/^\[\s*zoom/g) &&
|
||||
(op = $(this.entities.comparison)) &&
|
||||
(val = $(/^\d+/)) &&
|
||||
$(']')) {
|
||||
return tree.Zoom(op, val, memo);
|
||||
(val = $(this.entities.variable) || $(this.entities.dimension)) && $(']')) {
|
||||
return new tree.Zoom(op, val, memo);
|
||||
} else {
|
||||
// backtrack
|
||||
restore();
|
||||
}
|
||||
},
|
||||
|
||||
//
|
||||
// The `block` rule is used by `ruleset`
|
||||
// It's a wrapper around the `primary` rule, with added `{}`.
|
||||
//
|
||||
block: function() {
|
||||
var content;
|
||||
|
||||
@ -711,18 +672,20 @@ carto.Parser = function Parser(env) {
|
||||
}
|
||||
},
|
||||
|
||||
//
|
||||
// div, .class, body > p {...}
|
||||
//
|
||||
ruleset: function() {
|
||||
var selectors = [], s, f, l, rules, filters = [];
|
||||
save();
|
||||
|
||||
while (s = $(this.selector)) {
|
||||
selectors.push(s);
|
||||
if (! $(',')) { break }
|
||||
while ($(this.comment)) {}
|
||||
if (! $(',')) { break; }
|
||||
while ($(this.comment)) {}
|
||||
}
|
||||
if (s) {
|
||||
while ($(this.comment)) {}
|
||||
}
|
||||
if (s) $(this.comment);
|
||||
|
||||
if (selectors.length > 0 && (rules = $(this.block))) {
|
||||
if (selectors.length === 1 &&
|
||||
@ -738,11 +701,12 @@ carto.Parser = function Parser(env) {
|
||||
restore();
|
||||
}
|
||||
},
|
||||
|
||||
rule: function() {
|
||||
var name, value, c = input.charAt(i);
|
||||
save();
|
||||
|
||||
if (c === '.' || c === '#' || c === '&') { return }
|
||||
if (c === '.' || c === '#') { return; }
|
||||
|
||||
if (name = $(this.variable) || $(this.property)) {
|
||||
value = $(this.value);
|
||||
@ -785,7 +749,11 @@ carto.Parser = function Parser(env) {
|
||||
if (! $(',')) { break; }
|
||||
}
|
||||
|
||||
if (expressions.length > 0) {
|
||||
if (expressions.length > 1) {
|
||||
return new tree.Value(expressions.map(function(e) {
|
||||
return e.value[0];
|
||||
}));
|
||||
} else if (expressions.length === 1) {
|
||||
return new tree.Value(expressions);
|
||||
}
|
||||
},
|
||||
@ -826,16 +794,14 @@ carto.Parser = function Parser(env) {
|
||||
},
|
||||
|
||||
// Expressions either represent mathematical operations,
|
||||
// or white-space delimited Entities.
|
||||
//
|
||||
// 1px solid black
|
||||
// @var * 2
|
||||
// or white-space delimited Entities. @var * 2
|
||||
expression: function() {
|
||||
var e, delim, entities = [], d;
|
||||
|
||||
while (e = $(this.addition) || $(this.entity)) {
|
||||
entities.push(e);
|
||||
}
|
||||
|
||||
if (entities.length > 0) {
|
||||
return new tree.Expression(entities);
|
||||
}
|
||||
@ -846,5 +812,5 @@ carto.Parser = function Parser(env) {
|
||||
}
|
||||
}
|
||||
};
|
||||
return parser;
|
||||
};
|
||||
|
||||
|
@ -1,6 +1,5 @@
|
||||
var _ = require('underscore');
|
||||
var carto = require('./index');
|
||||
var tree = require('./tree');
|
||||
|
||||
carto.Renderer = function Renderer(env, options) {
|
||||
this.env = env || {};
|
||||
@ -8,12 +7,13 @@ carto.Renderer = function Renderer(env, options) {
|
||||
this.options.mapnik_version = this.options.mapnik_version || 'latest';
|
||||
};
|
||||
|
||||
// Prepare a MML document (given as an object) into a
|
||||
// fully-localized XML file ready for Mapnik2 consumption
|
||||
//
|
||||
// - @param {String} str the JSON file as a string.
|
||||
// - @param {Object} env renderer environment options.
|
||||
carto.Renderer.prototype.render = function render(m, callback) {
|
||||
/**
|
||||
* Prepare an MSS document (given as a string) into an
|
||||
* XML Style fragment (mostly useful for debugging)
|
||||
*
|
||||
* @param {String} data the mss contents as a string.
|
||||
*/
|
||||
carto.Renderer.prototype.renderMSS = function render(data) {
|
||||
// effects is a container for side-effects, which currently
|
||||
// are limited to FontSets.
|
||||
var env = _(this.env).defaults({
|
||||
@ -22,18 +22,77 @@ carto.Renderer.prototype.render = function render(m, callback) {
|
||||
effects: []
|
||||
});
|
||||
|
||||
tree.Reference.setVersion(this.options.mapnik_version);
|
||||
if (!carto.tree.Reference.setVersion(this.options.mapnik_version)) {
|
||||
throw new Error("Could not set mapnik version to " + this.options.mapnik_version);
|
||||
}
|
||||
|
||||
var output = [];
|
||||
var styles = [];
|
||||
|
||||
if (env.benchmark) console.time('Parsing MSS');
|
||||
var parser = (carto.Parser(env)).parse(data);
|
||||
if (env.benchmark) console.timeEnd('Parsing MSS');
|
||||
|
||||
if (env.benchmark) console.time('Rule generation');
|
||||
var rule_list = parser.toList(env);
|
||||
if (env.benchmark) console.timeEnd('Rule generation');
|
||||
|
||||
if (env.benchmark) console.time('Rule inheritance');
|
||||
var rules = inheritDefinitions(rule_list, env);
|
||||
if (env.benchmark) console.timeEnd('Rule inheritance');
|
||||
|
||||
if (env.benchmark) console.time('Style sort');
|
||||
var sorted = sortStyles(rules,env);
|
||||
if (env.benchmark) console.timeEnd('Style sort');
|
||||
|
||||
if (env.benchmark) console.time('Total Style generation');
|
||||
for (var k = 0, rule, style_name; k < sorted.length; k++) {
|
||||
rule = sorted[k];
|
||||
style_name = 'style' + (rule.attachment !== '__default__' ? '-' + rule.attachment : '');
|
||||
styles.push(style_name);
|
||||
var bench_name = '\tStyle "'+style_name+'" (#'+k+') toXML';
|
||||
if (env.benchmark) console.time(bench_name);
|
||||
// env.effects can be modified by this call
|
||||
output.push(carto.tree.StyleXML(style_name, rule.attachment, rule, env));
|
||||
if (env.benchmark) console.timeEnd(bench_name);
|
||||
}
|
||||
if (env.benchmark) console.timeEnd('Total Style generation');
|
||||
if (env.errors) throw env.errors;
|
||||
return output.join('\n');
|
||||
};
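An illustrative call of the synchronous renderMSS path above (not part of the commit; it assumes the default `mapnik_version` of 'latest' resolves a reference that knows `polygon-fill`, and the one-line stylesheet is made up):

    var carto = require('carto');
    // renderMSS() returns an XML Style fragment, or throws on a parse/validation error.
    var fragment = new carto.Renderer({}).renderMSS('#world { polygon-fill: #fff; }');
    console.log(fragment);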
|
||||
|
||||
/**
|
||||
* Prepare a MML document (given as an object) into a
|
||||
* fully-localized XML file ready for Mapnik2 consumption
|
||||
*
|
||||
* @param {Object} m - the MML document as a JavaScript object.
|
||||
*/
|
||||
carto.Renderer.prototype.render = function render(m) {
|
||||
// effects is a container for side-effects, which currently
|
||||
// are limited to FontSets.
|
||||
var env = _(this.env).defaults({
|
||||
benchmark: false,
|
||||
validation_data: false,
|
||||
effects: [],
|
||||
ppi: 90.714
|
||||
});
|
||||
|
||||
if (!carto.tree.Reference.setVersion(this.options.mapnik_version)) {
|
||||
throw new Error("Could not set mapnik version to " + this.options.mapnik_version);
|
||||
}
|
||||
|
||||
var output = [];
|
||||
|
||||
// Transform stylesheets into rulesets.
|
||||
var rulesets = _(m.Stylesheet).chain()
|
||||
// Transform stylesheets into definitions.
|
||||
var definitions = _(m.Stylesheet).chain()
|
||||
.map(function(s) {
|
||||
if (typeof s == 'string') {
|
||||
throw new Error("Stylesheet object is expected not a string: '" + s + "'");
|
||||
}
|
||||
// Passing the environment from stylesheet to stylesheet,
|
||||
// allows frames and effects to be maintained.
|
||||
env = _(env).extend({filename:s.id});
|
||||
|
||||
// @TODO try/catch?
|
||||
var time = +new Date(),
|
||||
root = (carto.Parser(env)).parse(s.data);
|
||||
if (env.benchmark)
|
||||
@ -43,51 +102,65 @@ carto.Renderer.prototype.render = function render(m, callback) {
|
||||
.flatten()
|
||||
.value();
|
||||
|
||||
function appliesTo(name, classIndex) {
|
||||
return function(definition) {
|
||||
return definition.appliesTo(l.name, classIndex);
|
||||
};
|
||||
}
|
||||
|
||||
// Iterate through layers and create styles custom-built
|
||||
// for each of them, and apply those styles to the layers.
|
||||
m.Layer.forEach(function(l) {
|
||||
l.styles = [];
|
||||
var styles, l, classIndex, rules, sorted, matching;
|
||||
for (var i = 0; i < m.Layer.length; i++) {
|
||||
l = m.Layer[i];
|
||||
styles = [];
|
||||
classIndex = {};
|
||||
|
||||
if (env.benchmark) console.warn('processing layer: ' + l.id);
|
||||
// Classes are given as space-separated alphanumeric strings.
|
||||
var classes = (l['class'] || '').split(/\s+/g);
|
||||
var matching = rulesets.filter(function(definition) {
|
||||
return definition.appliesTo(l.name, classes);
|
||||
});
|
||||
var rules = inheritRules(matching, env);
|
||||
var sorted = sortStyles(rules, env);
|
||||
_(sorted).each(function(rule) {
|
||||
var style = new tree.Style(l.name, rule.attachment, rule);
|
||||
if (style) {
|
||||
l.styles.push(style.name);
|
||||
|
||||
// env.effects can be modified by this call
|
||||
output.push(style.toXML(env));
|
||||
}
|
||||
});
|
||||
if (l.styles.length) {
|
||||
output.push((new carto.tree.Layer(l)).toXML());
|
||||
for (var j = 0; j < classes.length; j++) {
|
||||
classIndex[classes[j]] = true;
|
||||
}
|
||||
});
|
||||
matching = definitions.filter(appliesTo(l.name, classIndex));
|
||||
rules = inheritDefinitions(matching, env);
|
||||
sorted = sortStyles(rules, env);
|
||||
|
||||
for (var k = 0, rule, style_name; k < sorted.length; k++) {
|
||||
rule = sorted[k];
|
||||
style_name = l.name + (rule.attachment !== '__default__' ? '-' + rule.attachment : '');
|
||||
|
||||
// env.effects can be modified by this call
|
||||
var styleXML = carto.tree.StyleXML(style_name, rule.attachment, rule, env);
|
||||
|
||||
if (styleXML) {
|
||||
output.push(styleXML);
|
||||
styles.push(style_name);
|
||||
}
|
||||
}
|
||||
|
||||
output.push(carto.tree.LayerXML(l, styles));
|
||||
}
|
||||
|
||||
output.unshift(env.effects.map(function(e) {
|
||||
return e.toXML(env);
|
||||
}).join('\n'));
|
||||
|
||||
var map_properties;
|
||||
try {
|
||||
map_properties = getMapProperties(m, rulesets, env);
|
||||
} catch (err) {
|
||||
env.error(err);
|
||||
return callback(err);
|
||||
}
|
||||
var map_properties = getMapProperties(m, definitions, env);
|
||||
|
||||
// Exit on errors.
|
||||
if (env.errors) return callback(env.errors);
|
||||
if (env.errors) throw env.errors;
|
||||
|
||||
// Pass TileJSON and other custom parameters through to Mapnik XML.
|
||||
var parameters = _(m).reduce(function(memo, v, k) {
|
||||
if (!v && v !== 0) return memo;
|
||||
|
||||
switch (k) {
|
||||
// Known skippable properties.
|
||||
case 'srs':
|
||||
case 'Layer':
|
||||
case 'Stylesheet':
|
||||
break;
|
||||
// Non URL-bound TileJSON properties.
|
||||
case 'bounds':
|
||||
case 'center':
|
||||
@ -113,6 +186,16 @@ carto.Renderer.prototype.render = function render(m, callback) {
|
||||
memo.push(' <Parameter name="interactivity_layer">' + v.layer + '</Parameter>');
|
||||
memo.push(' <Parameter name="interactivity_fields">' + v.fields + '</Parameter>');
|
||||
break;
|
||||
// Support any additional scalar properties.
|
||||
default:
|
||||
if ('string' === typeof v) {
|
||||
memo.push(' <Parameter name="' + k + '"><![CDATA[' + v + ']]></Parameter>');
|
||||
} else if ('number' === typeof v) {
|
||||
memo.push(' <Parameter name="' + k + '">' + v + '</Parameter>');
|
||||
} else if ('boolean' === typeof v) {
|
||||
memo.push(' <Parameter name="' + k + '">' + v + '</Parameter>');
|
||||
}
|
||||
break;
|
||||
}
|
||||
return memo;
|
||||
}, []);
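// Illustrative example (added, not from the commit): the default branch above
// means a custom scalar MML key, say `attribution` (a hypothetical key here,
// assuming it is not one of the specially-handled TileJSON properties), comes
// through as:
//   <Parameter name="attribution"><![CDATA[OpenStreetMap contributors]]></Parameter>
// Numbers and `true` booleans are emitted without the CDATA wrapper, while
// falsy values other than 0 are skipped by the `!v && v !== 0` guard at the top.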
|
||||
@ -123,27 +206,35 @@ carto.Renderer.prototype.render = function render(m, callback) {
|
||||
);
|
||||
|
||||
var properties = _(map_properties).map(function(v) { return ' ' + v; }).join('');

output.unshift(
'<?xml version="1.0" ' +
'encoding="utf-8"?>\n' +
'<!DOCTYPE Map[]>\n' +
'<Map' + properties + ' maximum-extent="-20037508.34,-20037508.34,20037508.34,20037508.34">\n');
'<Map' + properties +'>\n');
output.push('</Map>');
return callback(null, output.join('\n'));
return output.join('\n');
};
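
// Illustrative only: a rough sketch of driving the renderer above, assuming the
// usual `carto` entry point and a pre-localized MML object with `Layer` and
// `Stylesheet` entries (the constructor options and stylesheet shape shown here
// are assumptions, not part of this diff). Because render() now returns the XML
// string directly instead of invoking a callback, errors are caught with try/catch.
var carto = require('carto');
var mml = {
    srs: '+proj=longlat +ellps=WGS84 +datum=WGS84 +no_defs',
    Stylesheet: [{ id: 'style.mss', data: '#world { line-width: 1; }' }],
    Layer: [{ name: 'world', Datasource: { file: 'world.shp', type: 'shape' } }]
};
var xml;
try {
    xml = new carto.Renderer({ benchmark: false }).render(mml);
} catch (err) {
    console.error(err.message ? err.message : err);
}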
|
||||
|
||||
// This function currently modifies 'current'
|
||||
function addRules(current, definition, existing) {
|
||||
var newFilters = definition.filters;
|
||||
var newRules = definition.rules;
|
||||
var updatedFilters, clone, previous;
|
||||
/**
|
||||
* This function currently modifies 'current'
|
||||
* @param {Array} current current list of rules
|
||||
* @param {Object} definition a Definition object to add to the rules
|
||||
* @param {Object} byFilter an object/dictionary of existing filters. This is
|
||||
* actually keyed `attachment->filter`
|
||||
* @param {Object} env the current environment
|
||||
*/
|
||||
function addRules(current, definition, byFilter, env) {
|
||||
var newFilters = definition.filters,
|
||||
newRules = definition.rules,
|
||||
updatedFilters, clone, previous;
|
||||
|
||||
// The current definition might have been split up into
|
||||
// multiple definitions already.
|
||||
for (var k = 0; k < current.length; k++) {
|
||||
updatedFilters = current[k].filters.cloneWith(newFilters);
|
||||
if (updatedFilters) {
|
||||
previous = existing[updatedFilters];
|
||||
previous = byFilter[updatedFilters];
|
||||
if (previous) {
|
||||
// There's already a definition with those exact
|
||||
// filters. Add the current definition's rules
|
||||
@ -160,32 +251,63 @@ function addRules(current, definition, existing) {
|
||||
// to make sure that in the next loop iteration, we're
|
||||
// not performing the same task for this element again,
|
||||
// hence the k++.
|
||||
byFilter[updatedFilters] = clone;
|
||||
current.splice(k, 0, clone);
|
||||
k++;
|
||||
}
|
||||
}
|
||||
} else if (updatedFilters === null) {
|
||||
// if updatedFilters is null, then adding the filters doesn't
|
||||
// invalidate or split the selector, so we addRules to the
|
||||
// combined selector
|
||||
|
||||
// Filters can be added, but they don't change the
|
||||
// filters. This means we don't have to split the
|
||||
// definition.
|
||||
//
|
||||
// this is cloned here because of shared classes, see
|
||||
// sharedclass.mss
|
||||
current[k] = current[k].clone();
|
||||
current[k].addRules(newRules);
|
||||
}
|
||||
// if updatedFilters is false, then the filters split the rule,
|
||||
// so they aren't the same inheritance chain
|
||||
}
|
||||
return current;
|
||||
}
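
// Illustrative only: the three-way contract the loop above relies on. A
// cloneWith-style merge either yields a merged filter object (the filters
// combine and narrow the selection), null (the new filters add nothing, so
// rules can be appended in place), or false (the filters contradict, so the
// definitions stay on separate inheritance chains). A toy stand-in:
function toyCloneWith(current, added) {
    var merged = {}, changed = false, key;
    for (key in current) merged[key] = current[key];
    for (key in added) {
        if (key in merged && merged[key] !== added[key]) return false; // contradiction
        if (!(key in merged)) { merged[key] = added[key]; changed = true; }
    }
    return changed ? merged : null; // null means "compatible but nothing new"
}
// toyCloneWith({zoom: 10}, {zoom: 11})     => false
// toyCloneWith({zoom: 10}, {zoom: 10})     => null
// toyCloneWith({zoom: 10}, {type: 'road'}) => merged object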
|
||||
|
||||
// Apply inherited styles from their ancestors to them.
|
||||
function inheritRules(definitions, env) {
|
||||
/**
|
||||
* Apply inherited styles from their ancestors to them.
|
||||
*
|
||||
* called either once per render (in the case of mss) or per layer
|
||||
* (for mml)
|
||||
*
|
||||
* @param {Object} definitions - a list of definitions objects
|
||||
* that contain .rules
|
||||
* @param {Object} env - the environment
|
||||
* @return {Array<Array>} an array of arrays is returned,
|
||||
* in which each array refers to a specific attachment
|
||||
*/
|
||||
function inheritDefinitions(definitions, env) {
|
||||
var inheritTime = +new Date();
|
||||
// definitions are ordered by specificity,
|
||||
// high (index 0) to low
|
||||
var byAttachment = {}, byFilter = {};
|
||||
var byAttachment = {},
|
||||
byFilter = {};
|
||||
var result = [];
|
||||
var current, previous, attachment;
|
||||
|
||||
// Evaluate the filters specified by each definition with the given
|
||||
// environment to correctly resolve variable references
|
||||
definitions.forEach(function(d) {
|
||||
d.filters.ev(env);
|
||||
});
|
||||
|
||||
for (var i = 0; i < definitions.length; i++) {
|
||||
|
||||
attachment = definitions[i].attachment;
|
||||
current = [definitions[i]];
|
||||
|
||||
if (!byAttachment[attachment]) {
|
||||
byAttachment[attachment] = [];
|
||||
byAttachment[attachment].attachment = attachment;
|
||||
@ -193,12 +315,11 @@ function inheritRules(definitions, env) {
|
||||
result.push(byAttachment[attachment]);
|
||||
}
|
||||
|
||||
current = [definitions[i]];
|
||||
// Iterate over all subsequent rules.
|
||||
for (var j = i + 1; j < definitions.length; j++) {
|
||||
if (definitions[j].attachment === attachment) {
|
||||
// Only inherit rules from the same attachment.
|
||||
current = addRules(current, definitions[j], byFilter);
|
||||
current = addRules(current, definitions[j], byFilter[attachment], env);
|
||||
}
|
||||
}
|
||||
|
||||
@ -211,46 +332,54 @@ function inheritRules(definitions, env) {
|
||||
if (env.benchmark) console.warn('Inheritance time: ' + ((new Date() - inheritTime)) + 'ms');
|
||||
|
||||
return result;
|
||||
|
||||
}
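
// Illustrative only: the shape of the value returned above. Definitions are
// grouped into one array per attachment, and each group array is tagged with
// an `attachment` property (simplified here with plain objects instead of
// Definition instances):
function groupByAttachment(definitions) {
    var byAttachment = {}, result = [];
    definitions.forEach(function(d) {
        if (!byAttachment[d.attachment]) {
            byAttachment[d.attachment] = [];
            byAttachment[d.attachment].attachment = d.attachment;
            result.push(byAttachment[d.attachment]);
        }
        byAttachment[d.attachment].push(d);
    });
    return result; // e.g. [ [d1, d3] /* .attachment === '__default__' */, [d2] /* 'casing' */ ]
}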
|
||||
|
||||
// Sort styles by the minimum index of their rules.
|
||||
// This sorts a slice of the styles, so it returns a sorted
|
||||
// array but does not change the input.
|
||||
function sortStylesIndex(a, b) { return b.index - a.index; }
|
||||
function sortStyles(styles, env) {
|
||||
styles.forEach(function(style) {
|
||||
for (var i = 0; i < styles.length; i++) {
|
||||
var style = styles[i];
|
||||
style.index = Infinity;
|
||||
style.forEach(function(block) {
|
||||
block.rules.forEach(function(rule) {
|
||||
for (var b = 0; b < style.length; b++) {
|
||||
var rules = style[b].rules;
|
||||
for (var r = 0; r < rules.length; r++) {
|
||||
var rule = rules[r];
|
||||
if (rule.index < style.index) {
|
||||
style.index = rule.index;
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var result = styles.slice();
|
||||
result.sort(function(a, b) {
|
||||
return b.index - a.index;
|
||||
});
|
||||
result.sort(sortStylesIndex);
|
||||
return result;
|
||||
}
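
// Illustrative only: the sort key used above. Every style gets the minimum
// index of any rule it contains, and styles are then ordered so that higher
// indexes (later definitions) come first:
function minRuleIndex(style) {
    var min = Infinity;
    for (var b = 0; b < style.length; b++) {
        for (var r = 0; r < style[b].rules.length; r++) {
            if (style[b].rules[r].index < min) min = style[b].rules[r].index;
        }
    }
    return min;
}
// [[{rules: [{index: 5}]}], [{rules: [{index: 2}]}]]
//     .sort(function(a, b) { return minRuleIndex(b) - minRuleIndex(a); })
// puts the style containing index 5 before the one containing index 2.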
|
||||
|
||||
// Find a rule like Map { background-color: #fff; },
|
||||
// if any, and return a list of properties to be inserted
|
||||
// into the <Map element of the resulting XML. Translates
|
||||
// properties of the mml object at `m` directly into XML
|
||||
// properties.
|
||||
//
|
||||
// - @param {Object} m the mml object.
|
||||
// - @param {Array} rulesets the output of toList.
|
||||
// - @param {Object} env.
|
||||
// - @return {String} rendered properties.
|
||||
function getMapProperties(m, rulesets, env) {
|
||||
/**
|
||||
* Find a rule like Map { background-color: #fff; },
|
||||
* if any, and return a list of properties to be inserted
|
||||
* into the <Map element of the resulting XML. Translates
|
||||
* properties of the mml object at `m` directly into XML
|
||||
* properties.
|
||||
*
|
||||
* @param {Object} m the mml object.
|
||||
* @param {Array} definitions the output of toList.
|
||||
* @param {Object} env
|
||||
* @return {String} rendered properties.
|
||||
*/
|
||||
function getMapProperties(m, definitions, env) {
|
||||
var rules = {};
|
||||
var symbolizers = tree.Reference.data.symbolizers.map;
|
||||
var symbolizers = carto.tree.Reference.data.symbolizers.map;
|
||||
|
||||
_(m).each(function(value, key) {
|
||||
if (key in symbolizers) rules[key] = key + '="' + value + '"';
|
||||
});
|
||||
|
||||
rulesets.filter(function(r) {
|
||||
definitions.filter(function(r) {
|
||||
return r.elements.join('') === 'Map';
|
||||
}).forEach(function(r) {
|
||||
for (var i = 0; i < r.rules.length; i++) {
|
||||
@ -261,10 +390,13 @@ function getMapProperties(m, rulesets, env) {
|
||||
index: r.rules[i].index
|
||||
});
|
||||
}
|
||||
rules[key] = r.rules[i].eval(env).toXML(env);
|
||||
rules[key] = r.rules[i].ev(env).toXML(env);
|
||||
}
|
||||
});
|
||||
return rules;
|
||||
}
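
// Illustrative only: roughly what the properties map ends up looking like. A
// stylesheet rule such as `Map { background-color: #fff; }` contributes an
// entry keyed by the property name whose value is an XML attribute fragment,
// so joining the values produces the attributes placed on the <Map> element:
var exampleProps = { 'background-color': 'background-color="#ffffff"' };
var mapTag = '<Map' +
    Object.keys(exampleProps).map(function(k) { return ' ' + exampleProps[k]; }).join('') +
    '>';
// => '<Map background-color="#ffffff">'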
|
||||
|
||||
module.exports = carto;
|
||||
module.exports.addRules = addRules;
|
||||
module.exports.inheritDefinitions = inheritDefinitions;
|
||||
module.exports.sortStyles = sortStyles;
|
||||
|
@ -5,13 +5,13 @@ var _ = require('underscore');
|
||||
// monkey patch less classes
tree.Value.prototype.toJS = function() {
//var v = this.value[0].value[0];
var val = this.eval()
var val = this.ev();
var v = val.toString();
if(val.is === "color" || val.is === 'uri' || val.is === 'string' || val.is === 'keyword') {
v = "'" + v + "'";
} else if (val.is === 'field') {
// replace [variable] with data['variable']
v = v.replace(/\[(.*)\]/g, "data['\$1']")
v = v.replace(/\[(.*)\]/g, "data['\$1']");
}
return "_value = " + v + ";";
};
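
// Illustrative only: the field rewrite performed above. A field reference like
// [population] in an expression becomes a lookup on the data object handed to
// the generated function:
var expr = '[population] > 1000';
var js = expr.replace(/\[(.*)\]/g, "data['$1']");
// js === "data['population'] > 1000"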
|
||||
@ -22,7 +22,7 @@ Object.defineProperty(tree.Filterset.prototype, 'toJS', {
|
||||
var opMap = {
|
||||
'=': '==='
|
||||
};
|
||||
return _.map(this, function(filter) {
|
||||
return _.map(this.filters, function(filter) {
|
||||
var op = filter.op;
|
||||
if(op in opMap) {
|
||||
op = opMap[op];
|
||||
@ -342,7 +342,7 @@ CartoCSS.prototype = {
|
||||
for(var prop in layer) {
|
||||
if (prop !== 'zoom' && prop !== 'frames') {
|
||||
|
||||
if(this.options.debug) console.log("****", prop);
|
||||
console.log("****", prop);
|
||||
layer[prop] = this._createFn(layer[prop]);
|
||||
}
|
||||
}
|
||||
|
@ -1,13 +0,0 @@
|
||||
(function(tree) {
|
||||
|
||||
tree.Anonymous = function Anonymous(string) {
|
||||
this.value = string.value || string;
|
||||
};
|
||||
tree.Anonymous.prototype = {
|
||||
toString: function() {
|
||||
return this.value;
|
||||
},
|
||||
eval: function() { return this; }
|
||||
};
|
||||
|
||||
})(require('../tree'));
|
@ -1,28 +1,23 @@
|
||||
(function(tree) {
|
||||
|
||||
var _ = require('underscore');
|
||||
tree.Call = function Call(name, args, index) {
|
||||
this.is = 'call';
|
||||
|
||||
this.name = name;
|
||||
this.args = args;
|
||||
this.index = index;
|
||||
};
|
||||
|
||||
tree.Call.prototype = {
|
||||
//
|
||||
// When evaluating a function call,
|
||||
is: 'call',
|
||||
// When evaluating a function call,
|
||||
// we either find the function in `tree.functions` [1],
|
||||
// in which case we call it, passing the evaluated arguments,
|
||||
// or we simply print it out as it appeared originally [2].
|
||||
//
|
||||
// The *functions.js* file contains the built-in functions.
|
||||
//
|
||||
// The reason why we evaluate the arguments, is in the case where
|
||||
// we try to pass a variable to a function, like: `saturate(@color)`.
|
||||
// The function should receive the value, not the variable.
|
||||
//
|
||||
eval: function(env) {
|
||||
var args = this.args.map(function(a) { return a.eval(env); });
|
||||
'ev': function(env) {
|
||||
var args = this.args.map(function(a) { return a.ev(env); });
|
||||
|
||||
for (var i = 0; i < args.length; i++) {
|
||||
if (args[i].is === 'undefined') {
|
||||
@ -34,8 +29,18 @@ tree.Call.prototype = {
|
||||
}
|
||||
|
||||
if (this.name in tree.functions) {
|
||||
if (tree.functions[this.name].length === args.length) {
|
||||
return tree.functions[this.name].apply(tree.functions, args);
|
||||
if (tree.functions[this.name].length <= args.length) {
|
||||
var val = tree.functions[this.name].apply(tree.functions, args);
|
||||
if (val === null) {
|
||||
env.error({
|
||||
message: 'incorrect arguments given to ' + this.name + '()',
|
||||
index: this.index,
|
||||
type: 'runtime',
|
||||
filename: this.filename
|
||||
});
|
||||
return { is: 'undefined', value: 'undefined' };
|
||||
}
|
||||
return val;
|
||||
} else {
|
||||
env.error({
|
||||
message: 'incorrect number of arguments for ' + this.name +
|
||||
@ -50,10 +55,19 @@ tree.Call.prototype = {
|
||||
};
|
||||
}
|
||||
} else {
|
||||
var fn = tree.Reference.mapnikFunction(this.name);
|
||||
if (!fn) {
|
||||
var fn = tree.Reference.mapnikFunctions[this.name];
|
||||
if (fn === undefined) {
|
||||
var functions = _.pairs(tree.Reference.mapnikFunctions);
|
||||
// cheap closest, needs improvement.
|
||||
var name = this.name;
|
||||
var mean = functions.map(function(f) {
|
||||
return [f[0], tree.Reference.editDistance(name, f[0]), f[1]];
|
||||
}).sort(function(a, b) {
|
||||
return a[1] - b[1];
|
||||
});
|
||||
env.error({
|
||||
message: 'unknown function ' + this.name,
|
||||
message: 'unknown function ' + this.name + '(), did you mean ' +
|
||||
mean[0][0] + '(' + mean[0][2] + ')',
|
||||
index: this.index,
|
||||
type: 'runtime',
|
||||
filename: this.filename
|
||||
@ -63,10 +77,12 @@ tree.Call.prototype = {
|
||||
value: 'undefined'
|
||||
};
|
||||
}
|
||||
if (fn[1] !== args.length) {
|
||||
if (fn !== args.length &&
|
||||
// support variable-arg functions like `colorize-alpha`
|
||||
fn !== -1) {
|
||||
env.error({
|
||||
message: 'function ' + this.name + ' takes ' +
|
||||
fn[1] + ' arguments and was given ' + args.length,
|
||||
message: 'function ' + this.name + '() takes ' +
|
||||
fn + ' arguments and was given ' + args.length,
|
||||
index: this.index,
|
||||
type: 'runtime',
|
||||
filename: this.filename
|
||||
@ -84,18 +100,10 @@ tree.Call.prototype = {
|
||||
},
|
||||
|
||||
toString: function(env, format) {
|
||||
if (format === 'image-filter') {
|
||||
if (this.args.length) {
|
||||
return this.name + ':' + this.args.join(',');
|
||||
} else {
|
||||
return this.name;
|
||||
}
|
||||
if (this.args.length) {
|
||||
return this.name + '(' + this.args.join(',') + ')';
|
||||
} else {
|
||||
if (this.args.length) {
|
||||
return this.name + '(' + this.args.join(',') + ')';
|
||||
} else {
|
||||
return this.name;
|
||||
}
|
||||
return this.name;
|
||||
}
|
||||
}
|
||||
};
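
// Illustrative only: the "did you mean" lookup above ranks known function names
// by edit distance to the unknown name and suggests the closest match. The
// reference's editDistance helper is not shown in this diff; a plain Levenshtein
// distance (an assumption, used here only for the sketch) behaves the same way:
function editDistance(a, b) {
    var d = [];
    for (var i = 0; i <= a.length; i++) d[i] = [i];
    for (var j = 0; j <= b.length; j++) d[0][j] = j;
    for (i = 1; i <= a.length; i++) {
        for (j = 1; j <= b.length; j++) {
            d[i][j] = Math.min(
                d[i - 1][j] + 1,
                d[i][j - 1] + 1,
                d[i - 1][j - 1] + (a[i - 1] === b[j - 1] ? 0 : 1));
        }
    }
    return d[a.length][b.length];
}
// ['lighten', 'darken', 'saturate'].sort(function(a, b) {
//     return editDistance('ligthen', a) - editDistance('ligthen', b);
// })[0] === 'lighten'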
|
||||
|
@ -1,16 +1,14 @@
|
||||
(function(tree) {
|
||||
//
|
||||
// RGB Colors - #ff0014, #eee
|
||||
//
|
||||
// can be initialized with a 3 or 6 char string or a 3 or 4 element
|
||||
// numerical array
|
||||
tree.Color = function Color(rgb, a) {
|
||||
//
|
||||
// The end goal here, is to parse the arguments
|
||||
// into an integer triplet, such as `128, 255, 0`
|
||||
//
|
||||
// This facilitates operations and conversions.
|
||||
//
|
||||
if (Array.isArray(rgb)) {
|
||||
this.rgb = rgb;
|
||||
this.rgb = rgb.slice(0, 3);
|
||||
} else if (rgb.length == 6) {
|
||||
this.rgb = rgb.match(/.{2}/g).map(function(c) {
|
||||
return parseInt(c, 16);
|
||||
@ -20,12 +18,19 @@ tree.Color = function Color(rgb, a) {
|
||||
return parseInt(c + c, 16);
|
||||
});
|
||||
}
|
||||
this.is = 'color';
|
||||
this.alpha = typeof(a) === 'number' ? a : 1;
|
||||
|
||||
if (typeof(a) === 'number') {
|
||||
this.alpha = a;
|
||||
} else if (rgb.length === 4) {
|
||||
this.alpha = rgb[3];
|
||||
} else {
|
||||
this.alpha = 1;
|
||||
}
|
||||
};
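
// Illustrative only: how the constructor above resolves alpha. An explicit
// numeric second argument wins, otherwise a fourth array element is used,
// otherwise alpha defaults to 1:
// new tree.Color([128, 255, 0])            -> rgb [128, 255, 0], alpha 1
// new tree.Color([128, 255, 0, 0.5])       -> rgb [128, 255, 0], alpha 0.5
// new tree.Color([128, 255, 0, 0.5], 0.2)  -> alpha 0.2 (argument overrides)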
|
||||
|
||||
tree.Color.prototype = {
|
||||
eval: function() { return this; },
|
||||
is: 'color',
|
||||
'ev': function() { return this; },
|
||||
|
||||
// If we have some transparency, the only way to represent it
|
||||
// is via `rgba`. Otherwise, we use the hex representation,
|
||||
@ -49,7 +54,7 @@ tree.Color.prototype = {
|
||||
// channels will spill onto each other. Once we have
|
||||
// our result, in the form of an integer triplet,
|
||||
// we create a new Color node to hold the result.
|
||||
operate: function(op, other) {
|
||||
operate: function(env, op, other) {
|
||||
var result = [];
|
||||
|
||||
if (! (other instanceof tree.Color)) {
|
||||
@ -87,5 +92,4 @@ tree.Color.prototype = {
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
})(require('../tree'));
|
||||
|
@ -9,7 +9,7 @@ tree.Comment.prototype = {
|
||||
toString: function(env) {
|
||||
return '<!--' + this.value + '-->';
|
||||
},
|
||||
eval: function() { return this; }
|
||||
'ev': function() { return this; }
|
||||
};
|
||||
|
||||
})(require('../tree'));
|
||||
|
@ -1,15 +1,22 @@
|
||||
(function(tree) {
|
||||
var assert = require('assert');
|
||||
var assert = require('assert'),
|
||||
_ = require('underscore');
|
||||
|
||||
// A definition is the combination of a selector and rules, like
|
||||
// #foo {
|
||||
// polygon-opacity:1.0;
|
||||
// }
|
||||
//
|
||||
// The selector can have filters
|
||||
tree.Definition = function Definition(selector, rules) {
|
||||
this.elements = selector.elements;
|
||||
assert.ok(selector.filters instanceof tree.Filterset);
|
||||
this.rules = rules;
|
||||
this.ruleIndex = [];
|
||||
this.ruleIndex = {};
|
||||
for (var i = 0; i < this.rules.length; i++) {
|
||||
if ('zoom' in this.rules[i]) this.rules[i] = this.rules[i].clone();
|
||||
this.rules[i].zoom = selector.zoom;
|
||||
this.ruleIndex.push(this.rules[i].updateID());
|
||||
this.ruleIndex[this.rules[i].updateID()] = true;
|
||||
}
|
||||
this.filters = selector.filters;
|
||||
this.zoom = selector.zoom;
|
||||
@ -30,7 +37,7 @@ tree.Definition.prototype.clone = function(filters) {
|
||||
if (filters) assert.ok(filters instanceof tree.Filterset);
|
||||
var clone = Object.create(tree.Definition.prototype);
|
||||
clone.rules = this.rules.slice();
|
||||
clone.ruleIndex = this.ruleIndex.slice();
|
||||
clone.ruleIndex = _.clone(this.ruleIndex);
|
||||
clone.filters = filters ? filters : this.filters.clone();
|
||||
clone.attachment = this.attachment;
|
||||
return clone;
|
||||
@ -41,9 +48,9 @@ tree.Definition.prototype.addRules = function(rules) {
|
||||
|
||||
// Add only unique rules.
|
||||
for (var i = 0; i < rules.length; i++) {
|
||||
if (this.ruleIndex.indexOf(rules[i].id) < 0) {
|
||||
if (!this.ruleIndex[rules[i].id]) {
|
||||
this.rules.push(rules[i]);
|
||||
this.ruleIndex.push(rules[i].id);
|
||||
this.ruleIndex[rules[i].id] = true;
|
||||
added++;
|
||||
}
|
||||
}
|
||||
@ -51,24 +58,33 @@ tree.Definition.prototype.addRules = function(rules) {
|
||||
return added;
|
||||
};
|
||||
|
||||
/**
|
||||
* Determine whether this selector matches a given id
|
||||
* and array of classes, by determining whether
|
||||
* all elements it contains match.
|
||||
*/
|
||||
// Determine whether this selector matches a given id
|
||||
// and array of classes, by determining whether
|
||||
// all elements it contains match.
|
||||
tree.Definition.prototype.appliesTo = function(id, classes) {
|
||||
for (var i = 0; i < this.elements.length; i++) {
|
||||
if (!this.elements[i].matches(id, classes)) {
|
||||
return false;
|
||||
}
|
||||
for (var i = 0, l = this.elements.length; i < l; i++) {
|
||||
var elem = this.elements[i];
|
||||
if (!(elem.wildcard ||
|
||||
(elem.type === 'class' && classes[elem.clean]) ||
|
||||
(elem.type === 'id' && id === elem.clean))) return false;
|
||||
}
|
||||
return true;
|
||||
};
|
||||
|
||||
function symbolizerName(symbolizer) {
|
||||
function capitalize(str) { return str[1].toUpperCase(); }
|
||||
return symbolizer.charAt(0).toUpperCase() +
|
||||
symbolizer.slice(1).replace(/\-./, capitalize) + 'Symbolizer';
|
||||
}
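
// Illustrative only: what symbolizerName produces, e.g.
// symbolizerName('line')            -> 'LineSymbolizer'
// symbolizerName('polygon-pattern') -> 'PolygonPatternSymbolizer'
// (the non-global /\-./ replace turns the first '-x' into 'X')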
|
||||
|
||||
// Get a simple list of the symbolizers, in order
|
||||
function symbolizerList(sym_order) {
|
||||
return sym_order.sort(function(a, b) { return a[1] - b[1]; })
|
||||
.map(function(v) { return v[0]; });
|
||||
}
|
||||
|
||||
tree.Definition.prototype.symbolizersToXML = function(env, symbolizers, zoom) {
|
||||
var xml = ' <Rule>\n';
|
||||
xml += tree.Zoom.toXML(zoom).join('');
|
||||
xml += this.filters.toXML(env);
|
||||
var xml = zoom.toXML(env).join('') + this.filters.toXML(env);
|
||||
|
||||
// Sort symbolizers by the index of their first property definition
|
||||
var sym_order = [], indexes = [];
|
||||
@ -81,20 +97,17 @@ tree.Definition.prototype.symbolizersToXML = function(env, symbolizers, zoom) {
|
||||
sym_order.push([key, min_idx]);
|
||||
}
|
||||
|
||||
// Get a simple list of the symbolizers, in order
|
||||
sym_order = sym_order.sort(function(a, b) {
|
||||
return a[1] - b[1];
|
||||
}).map(function(v) {
|
||||
return v[0];
|
||||
});
|
||||
sym_order = symbolizerList(sym_order);
|
||||
var sym_count = 0;
|
||||
|
||||
for (var i = 0; i < sym_order.length; i++) {
|
||||
var attributes = symbolizers[sym_order[i]];
|
||||
var symbolizer = sym_order[i].split('/').pop();
|
||||
|
||||
|
||||
// Skip the magical * symbolizer which is used for universal properties
|
||||
// which are bubbled up to Style elements instead of Symbolizer elements.
|
||||
if (symbolizer === '*') continue;
|
||||
sym_count++;
|
||||
|
||||
var fail = tree.Reference.requiredProperties(symbolizer, attributes);
|
||||
if (fail) {
|
||||
@ -106,37 +119,43 @@ tree.Definition.prototype.symbolizersToXML = function(env, symbolizers, zoom) {
|
||||
});
|
||||
}
|
||||
|
||||
var name = symbolizer.charAt(0).toUpperCase() +
|
||||
symbolizer.slice(1).replace(/\-./, function(str) {
|
||||
return str[1].toUpperCase();
|
||||
}) + 'Symbolizer';
|
||||
var name = symbolizerName(symbolizer);
|
||||
|
||||
var selfclosing = true, tagcontent;
|
||||
xml += ' <' + name + ' ';
|
||||
for (var key in attributes) {
|
||||
for (var j in attributes) {
|
||||
if (symbolizer === 'map') env.error({
|
||||
message: 'Map properties are not permitted in other rules',
|
||||
index: attributes[key].index,
|
||||
filename: attributes[key].filename
|
||||
index: attributes[j].index,
|
||||
filename: attributes[j].filename
|
||||
});
|
||||
var x = tree.Reference.selector(attributes[key].name);
|
||||
var x = tree.Reference.selector(attributes[j].name);
|
||||
if (x && x.serialization && x.serialization === 'content') {
|
||||
selfclosing = false;
|
||||
tagcontent = attributes[key].eval(env).toXML(env, true);
|
||||
tagcontent = attributes[j].ev(env).toXML(env, true);
|
||||
} else if (x && x.serialization && x.serialization === 'tag') {
|
||||
selfclosing = false;
|
||||
tagcontent = attributes[j].ev(env).toXML(env, true);
|
||||
} else {
|
||||
xml += attributes[key].eval(env).toXML(env) + ' ';
|
||||
xml += attributes[j].ev(env).toXML(env) + ' ';
|
||||
}
|
||||
}
|
||||
if (selfclosing) {
|
||||
xml += '/>\n';
|
||||
} else {
|
||||
xml += '><![CDATA[' + tagcontent + ']]></' + name + '>\n';
|
||||
} else if (typeof tagcontent !== "undefined") {
|
||||
if (tagcontent.indexOf('<') != -1) {
|
||||
xml += '>' + tagcontent + '</' + name + '>\n';
|
||||
} else {
|
||||
xml += '><![CDATA[' + tagcontent + ']]></' + name + '>\n';
|
||||
}
|
||||
}
|
||||
}
|
||||
xml += ' </Rule>\n';
|
||||
return xml;
|
||||
if (!sym_count || !xml) return '';
|
||||
return ' <Rule>\n' + xml + ' </Rule>\n';
|
||||
};
|
||||
|
||||
// Take a zoom range of zooms and 'i', the index of a rule in this.rules,
|
||||
// and find all applicable symbolizers
|
||||
tree.Definition.prototype.collectSymbolizers = function(zooms, i) {
|
||||
var symbolizers = {}, child;
|
||||
|
||||
@ -160,18 +179,18 @@ tree.Definition.prototype.collectSymbolizers = function(zooms, i) {
|
||||
}
|
||||
};
|
||||
|
||||
// The tree.Zoom.toString function ignores the holes in zoom ranges and outputs
|
||||
// scaledenominators that cover the whole range from the first to last bit set.
|
||||
// This algorithm can produce zoom ranges that may have holes. However,
|
||||
// when using the filter-mode="first", more specific zoom filters will always
|
||||
// end up before broader ranges. The filter-mode will pick those first before
|
||||
// resorting to the zoom range with the hole and stop processing further rules.
|
||||
tree.Definition.prototype.toXML = function(env, existing) {
|
||||
// The tree.Zoom.toString function ignores the holes in zoom ranges and outputs
|
||||
// scaledenominators that cover the whole range from the first to last bit set.
|
||||
// This algorithm can produce zoom ranges that may have holes. However,
|
||||
// when using the filter-mode="first", more specific zoom filters will always
|
||||
// end up before broader ranges. The filter-mode will pick those first before
|
||||
// resorting to the zoom range with the hole and stop processing further rules.
|
||||
var filter = this.filters.toString();
|
||||
if (!(filter in existing)) existing[filter] = tree.Zoom.all;
|
||||
|
||||
var available = tree.Zoom.all, xml = '', zoom, symbolizers;
|
||||
var zooms = { available: tree.Zoom.all };
|
||||
var available = tree.Zoom.all, xml = '', zoom, symbolizers,
|
||||
zooms = { available: tree.Zoom.all };
|
||||
for (var i = 0; i < this.rules.length && available; i++) {
|
||||
zooms.rule = this.rules[i].zoom;
|
||||
if (!(existing[filter] & zooms.rule)) continue;
|
||||
@ -179,7 +198,8 @@ tree.Definition.prototype.toXML = function(env, existing) {
|
||||
while (zooms.current = zooms.rule & available) {
|
||||
if (symbolizers = this.collectSymbolizers(zooms, i)) {
|
||||
if (!(existing[filter] & zooms.current)) continue;
|
||||
xml += this.symbolizersToXML(env, symbolizers, existing[filter] & zooms.current);
|
||||
xml += this.symbolizersToXML(env, symbolizers,
|
||||
(new tree.Zoom()).setZoom(existing[filter] & zooms.current));
|
||||
existing[filter] &= ~zooms.current;
|
||||
}
|
||||
}
|
||||
|
@ -1,22 +1,48 @@
|
||||
(function(tree) {
|
||||
|
||||
var _ = require('underscore');
|
||||
//
|
||||
// A number with a unit
|
||||
//
|
||||
tree.Dimension = function Dimension(value, unit, index) {
|
||||
this.value = parseFloat(value);
|
||||
this.unit = unit || null;
|
||||
this.is = 'float';
|
||||
this.index = index;
|
||||
};
|
||||
|
||||
tree.Dimension.prototype = {
|
||||
eval: function (env) {
|
||||
if (this.unit && ['px', '%'].indexOf(this.unit) === -1) {
|
||||
env.error({
|
||||
is: 'float',
|
||||
physical_units: ['m', 'cm', 'in', 'mm', 'pt', 'pc'],
|
||||
screen_units: ['px', '%'],
|
||||
all_units: ['m', 'cm', 'in', 'mm', 'pt', 'pc', 'px', '%'],
|
||||
densities: {
|
||||
m: 0.0254,
|
||||
mm: 25.4,
|
||||
cm: 2.54,
|
||||
pt: 72,
|
||||
pc: 6
|
||||
},
|
||||
ev: function (env) {
|
||||
if (this.unit && !_.contains(this.all_units, this.unit)) {
|
||||
env.error({
|
||||
message: "Invalid unit: '" + this.unit + "'",
|
||||
index: this.index
|
||||
});
|
||||
return { is: 'undefined', value: 'undefined' };
|
||||
}
|
||||
|
||||
// normalize units which are not px or %
|
||||
if (this.unit && _.contains(this.physical_units, this.unit)) {
|
||||
if (!env.ppi) {
|
||||
env.error({
|
||||
message: "ppi is not set, so metric units can't be used",
|
||||
index: this.index
|
||||
});
|
||||
return { is: 'undefined', value: 'undefined' };
|
||||
}
|
||||
// convert all units to inch
|
||||
// convert inch to px using ppi
|
||||
this.value = (this.value / this.densities[this.unit]) * env.ppi;
|
||||
this.unit = 'px';
|
||||
}
|
||||
|
||||
return this;
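
// Illustrative only: the physical-unit normalization above in isolation.
// Values are converted to inches via the densities table and then to pixels
// using env.ppi, e.g. with env.ppi = 72:
//   10mm -> (10 / 25.4) * 72 ~= 28.35px
//   2cm  -> (2 / 2.54) * 72  ~= 56.69px
//   18pt -> (18 / 72) * 72    = 18px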
|
||||
@ -28,19 +54,45 @@ tree.Dimension.prototype = {
|
||||
toColor: function() {
|
||||
return new tree.Color([this.value, this.value, this.value]);
|
||||
},
|
||||
round: function() {
|
||||
this.value = Math.round(this.value);
|
||||
return this;
|
||||
},
|
||||
toString: function() {
|
||||
return this.value.toString();
|
||||
},
|
||||
operate: function(env, op, other) {
|
||||
if (this.unit === '%' && other.unit !== '%') {
|
||||
env.error({
|
||||
message: 'If two operands differ, the first must not be %',
|
||||
index: this.index
|
||||
});
|
||||
return {
|
||||
is: 'undefined',
|
||||
value: 'undefined'
|
||||
};
|
||||
}
|
||||
|
||||
// In an operation between two Dimensions,
|
||||
// we default to the first Dimension's unit,
|
||||
// so `1px + 2em` will yield `3px`.
|
||||
// In the future, we could implement some unit
|
||||
// conversions such that `100cm + 10mm` would yield
|
||||
// `101cm`.
|
||||
operate: function(op, other) {
|
||||
if (this.unit !== '%' && other.unit === '%') {
|
||||
if (op === '*' || op === '/' || op === '%') {
|
||||
env.error({
|
||||
message: 'Percent values can only be added or subtracted from other values',
|
||||
index: this.index
|
||||
});
|
||||
return {
|
||||
is: 'undefined',
|
||||
value: 'undefined'
|
||||
};
|
||||
}
|
||||
|
||||
return new tree.Dimension(tree.operate(op,
|
||||
this.value, this.value * other.value * 0.01),
|
||||
this.unit);
|
||||
}
|
||||
|
||||
//here the operands are either the same (% or undefined or px), or one is undefined and the other is px
|
||||
return new tree.Dimension(tree.operate(op, this.value, other.value),
|
||||
this.unit || other.unit);
|
||||
this.unit || other.unit);
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -1,39 +0,0 @@
|
||||
(function(tree) {
|
||||
|
||||
tree.Directive = function Directive(name, value) {
|
||||
this.name = name;
|
||||
if (Array.isArray(value)) {
|
||||
this.ruleset = new tree.Ruleset([], value);
|
||||
} else {
|
||||
this.value = value;
|
||||
}
|
||||
};
|
||||
tree.Directive.prototype = {
|
||||
toString: function(ctx, env) {
|
||||
if (this.ruleset) {
|
||||
this.ruleset.root = true;
|
||||
return this.name + ' {\n ' +
|
||||
this.ruleset.toString(ctx, env).trim().replace(/\n/g, '\n ') +
|
||||
'\n}\n';
|
||||
} else {
|
||||
return this.name + ' ' + this.value.toString() + ';\n';
|
||||
}
|
||||
},
|
||||
eval: function(env) {
|
||||
env.frames.unshift(this);
|
||||
this.ruleset = this.ruleset && this.ruleset.eval(env);
|
||||
env.frames.shift();
|
||||
return this;
|
||||
},
|
||||
variable: function(name) {
|
||||
return tree.Ruleset.prototype.variable.call(this.ruleset, name);
|
||||
},
|
||||
find: function() {
|
||||
return tree.Ruleset.prototype.find.apply(this.ruleset, arguments);
|
||||
},
|
||||
rulesets: function() {
|
||||
return tree.Ruleset.prototype.rulesets.apply(this.ruleset);
|
||||
}
|
||||
};
|
||||
|
||||
})(require('../tree'));
|
@ -3,31 +3,28 @@
|
||||
// An element is an id or class selector
|
||||
tree.Element = function Element(value) {
|
||||
this.value = value.trim();
|
||||
if (this.value[0] === '#') {
|
||||
this.type = 'id';
|
||||
this.clean = this.value.replace(/^#/, '');
|
||||
}
|
||||
if (this.value[0] === '.') {
|
||||
this.type = 'class';
|
||||
this.clean = this.value.replace(/^\./, '');
|
||||
}
|
||||
if (this.value.indexOf('*') !== -1) {
|
||||
this.type = 'wildcard';
|
||||
}
|
||||
};
|
||||
|
||||
// Determine the 'specificity matrix' of this
|
||||
// specific selector
|
||||
tree.Element.prototype.specificity = function() {
|
||||
return [
|
||||
(this.value[0] == '#') ? 1 : 0, // a
|
||||
(this.value[0] == '.') ? 1 : 0 // b
|
||||
(this.type === 'id') ? 1 : 0, // a
|
||||
(this.type === 'class') ? 1 : 0 // b
|
||||
];
|
||||
};
|
||||
|
||||
tree.Element.prototype.toString = function() {
|
||||
return this.value;
|
||||
};
|
||||
|
||||
// Determine whether this element matches an id or classes.
|
||||
// An element is a single id or class, or check whether the given
|
||||
// array of classes contains this, or the id is equal to this.
|
||||
//
|
||||
// Takes a plain string for id and plain strings in the array of
|
||||
// classes.
|
||||
tree.Element.prototype.matches = function(id, classes) {
|
||||
return (classes.indexOf(this.value.replace(/^\./, '')) !== -1) ||
|
||||
(this.value.replace(/^#/, '') === id) ||
|
||||
(this.value === '*');
|
||||
};
|
||||
tree.Element.prototype.toString = function() { return this.value; };
|
||||
|
||||
})(require('../tree'));
|
||||
|
@ -3,16 +3,19 @@
|
||||
tree.Expression = function Expression(value) {
|
||||
this.value = value;
|
||||
};
|
||||
|
||||
tree.Expression.prototype = {
|
||||
eval: function(env) {
|
||||
is: 'expression',
|
||||
ev: function(env) {
|
||||
if (this.value.length > 1) {
|
||||
return new tree.Expression(this.value.map(function(e) {
|
||||
return e.eval(env);
|
||||
return e.ev(env);
|
||||
}));
|
||||
} else {
|
||||
return this.value[0].eval(env);
|
||||
return this.value[0].ev(env);
|
||||
}
|
||||
},
|
||||
|
||||
toString: function(env) {
|
||||
return this.value.map(function(e) {
|
||||
return e.toString(env);
|
||||
|
@ -2,14 +2,14 @@
|
||||
|
||||
tree.Field = function Field(content) {
|
||||
this.value = content || '';
|
||||
this.is = 'field';
|
||||
};
|
||||
|
||||
tree.Field.prototype = {
|
||||
is: 'field',
|
||||
toString: function() {
|
||||
return '[' + this.value + ']';
|
||||
},
|
||||
'eval': function() {
|
||||
'ev': function() {
|
||||
return this;
|
||||
}
|
||||
};
|
||||
|
@ -1,32 +1,15 @@
|
||||
(function(tree) {
|
||||
|
||||
tree.Filter = function Filter(key, op, val, index, filename) {
|
||||
if (key.is) {
|
||||
this.key = key.value;
|
||||
this._key = key;
|
||||
} else {
|
||||
this.key = key;
|
||||
}
|
||||
|
||||
this.key = key;
|
||||
this.op = op;
|
||||
this.val = val;
|
||||
this.index = index;
|
||||
this.filename = filename;
|
||||
|
||||
if (val.is) {
|
||||
this.val = val.value;
|
||||
this._val = val;
|
||||
} else {
|
||||
this.val = val;
|
||||
}
|
||||
|
||||
if (ops[this.op][1] == 'numeric') {
|
||||
this.val = 1 * this.val;
|
||||
}
|
||||
|
||||
this.id = this.key + this.op + this.val;
|
||||
};
|
||||
|
||||
|
||||
// xmlsafe, numeric, suffix
|
||||
var ops = {
|
||||
'<': [' < ', 'numeric'],
|
||||
@ -38,15 +21,35 @@ var ops = {
|
||||
'=~': ['.match(', 'string', ')']
|
||||
};
|
||||
|
||||
tree.Filter.prototype.ev = function(env) {
|
||||
this.key = this.key.ev(env);
|
||||
this.val = this.val.ev(env);
|
||||
return this;
|
||||
};
|
||||
|
||||
tree.Filter.prototype.toXML = function(env) {
|
||||
if (this.val.eval) this._val = this.val.eval(env);
|
||||
if (this.key.eval) this._key = this.key.eval(env);
|
||||
if (this._key) var key = this._key.toString(false);
|
||||
if (this._val) var val = this._val.toString(this._val.is == 'string');
|
||||
if (tree.Reference.data.filter) {
|
||||
if (this.key.is === 'keyword' && -1 === tree.Reference.data.filter.value.indexOf(this.key.toString())) {
|
||||
env.error({
|
||||
message: this.key.toString() + ' is not a valid keyword in a filter expression',
|
||||
index: this.index,
|
||||
filename: this.filename
|
||||
});
|
||||
}
|
||||
if (this.val.is === 'keyword' && -1 === tree.Reference.data.filter.value.indexOf(this.val.toString())) {
|
||||
env.error({
|
||||
message: this.val.toString() + ' is not a valid keyword in a filter expression',
|
||||
index: this.index,
|
||||
filename: this.filename
|
||||
});
|
||||
}
|
||||
}
|
||||
var key = this.key.toString(false);
|
||||
var val = this.val.toString(this.val.is == 'string');
|
||||
|
||||
if (
|
||||
(ops[this.op][1] == 'numeric' && isNaN(this.val)) ||
|
||||
(ops[this.op][1] == 'string' && (val || this.val)[0] != "'")
|
||||
(ops[this.op][1] == 'numeric' && isNaN(val) && this.val.is !== 'field') ||
|
||||
(ops[this.op][1] == 'string' && (val)[0] != "'")
|
||||
) {
|
||||
env.error({
|
||||
message: 'Cannot use operator "' + this.op + '" with value ' + this.val,
|
||||
@ -55,7 +58,7 @@ tree.Filter.prototype.toXML = function(env) {
|
||||
});
|
||||
}
|
||||
|
||||
return '[' + (key || this.key) + ']' + ops[this.op][0] + '' + (val || this.val) + (ops[this.op][2] || '');
|
||||
return key + ops[this.op][0] + val + (ops[this.op][2] || '');
|
||||
};
|
||||
|
||||
tree.Filter.prototype.toString = function() {
|
||||
|
@ -1,224 +1,248 @@
|
||||
var tree = require('../tree');
|
||||
|
||||
tree.Filterset = function Filterset() {};
|
||||
tree.Filterset = function Filterset() {
|
||||
this.filters = {};
|
||||
};
|
||||
|
||||
Object.defineProperty(tree.Filterset.prototype, 'toXML', {
|
||||
enumerable: false,
|
||||
value: function(env) {
|
||||
var filters = [];
|
||||
for (var id in this) {
|
||||
filters.push('(' + this[id].toXML(env).trim() + ')');
|
||||
}
|
||||
|
||||
if (filters.length) {
|
||||
return ' <Filter>' + filters.join(' and ') + '</Filter>\n';
|
||||
} else {
|
||||
return '';
|
||||
}
|
||||
tree.Filterset.prototype.toXML = function(env) {
|
||||
var filters = [];
|
||||
for (var id in this.filters) {
|
||||
filters.push('(' + this.filters[id].toXML(env).trim() + ')');
|
||||
}
|
||||
});
|
||||
|
||||
Object.defineProperty(tree.Filterset.prototype, 'toString', {
|
||||
enumerable: false,
|
||||
value: function() {
|
||||
var arr = [];
|
||||
for (var id in this) arr.push(this[id].id);
|
||||
arr.sort();
|
||||
return arr.join('\t');
|
||||
if (filters.length) {
|
||||
return ' <Filter>' + filters.join(' and ') + '</Filter>\n';
|
||||
} else {
|
||||
return '';
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
Object.defineProperty(tree.Filterset.prototype, 'clone', {
|
||||
enumerable: false,
|
||||
value: function() {
|
||||
var clone = new tree.Filterset();
|
||||
for (var id in this) {
|
||||
clone[id] = this[id];
|
||||
}
|
||||
return clone;
|
||||
tree.Filterset.prototype.toString = function() {
|
||||
var arr = [];
|
||||
for (var id in this.filters) arr.push(this.filters[id].id);
|
||||
return arr.sort().join('\t');
|
||||
};
|
||||
|
||||
tree.Filterset.prototype.ev = function(env) {
|
||||
for (var i in this.filters) {
|
||||
this.filters[i].ev(env);
|
||||
}
|
||||
});
|
||||
return this;
|
||||
};
|
||||
|
||||
tree.Filterset.prototype.clone = function() {
|
||||
var clone = new tree.Filterset();
|
||||
for (var id in this.filters) {
|
||||
clone.filters[id] = this.filters[id];
|
||||
}
|
||||
return clone;
|
||||
};
|
||||
|
||||
// Note: other has to be a tree.Filterset.
|
||||
Object.defineProperty(tree.Filterset.prototype, 'cloneWith', {
|
||||
enumerable: false,
|
||||
value: function(other) {
|
||||
var additions;
|
||||
for (var id in other) {
|
||||
var status = this.addable(other[id]);
|
||||
if (status === false) {
|
||||
return false;
|
||||
tree.Filterset.prototype.cloneWith = function(other) {
|
||||
var additions = [];
|
||||
for (var id in other.filters) {
|
||||
var status = this.addable(other.filters[id]);
|
||||
// status is true, false or null. if it's null we don't fail this
|
||||
// clone nor do we add the filter.
|
||||
if (status === false) {
|
||||
return false;
|
||||
}
|
||||
if (status === true) {
|
||||
// Adding the filter will override another value.
|
||||
additions.push(other.filters[id]);
|
||||
}
|
||||
}
|
||||
|
||||
// Adding the other filters doesn't make this filterset invalid, but it
|
||||
// doesn't add anything to it either.
|
||||
if (!additions.length) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// We can successfully add all filters. Now clone the filterset and add the
|
||||
// new rules.
|
||||
var clone = new tree.Filterset();
|
||||
|
||||
// We can add the rules that are already present without going through the
|
||||
// add function as a Filterset is always in its simplest canonical form.
|
||||
for (id in this.filters) {
|
||||
clone.filters[id] = this.filters[id];
|
||||
}
|
||||
|
||||
// Only add new filters that actually change the filter.
|
||||
while (id = additions.shift()) {
|
||||
clone.add(id);
|
||||
}
|
||||
|
||||
return clone;
|
||||
};
|
||||
|
||||
// Returns true when the new filter can be added, false otherwise.
|
||||
// It can also return null, and on the other side we test for === true or
|
||||
// false
|
||||
tree.Filterset.prototype.addable = function(filter) {
|
||||
var key = filter.key.toString(),
|
||||
value = filter.val.toString();
|
||||
|
||||
if (value.match(/^[0-9]+(\.[0-9]*)?$/)) value = parseFloat(value);
|
||||
|
||||
switch (filter.op) {
|
||||
case '=':
|
||||
// if there is already foo= and we're adding foo=
|
||||
if (this.filters[key + '='] !== undefined) {
|
||||
if (this.filters[key + '='].val.toString() != value) {
|
||||
return false;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
if (status === true) {
|
||||
// Adding the filter will override another value.
|
||||
if (!additions) additions = [];
|
||||
additions.push(other[id]);
|
||||
if (this.filters[key + '!=' + value] !== undefined) return false;
|
||||
if (this.filters[key + '>'] !== undefined && this.filters[key + '>'].val >= value) return false;
|
||||
if (this.filters[key + '<'] !== undefined && this.filters[key + '<'].val <= value) return false;
|
||||
if (this.filters[key + '>='] !== undefined && this.filters[key + '>='].val > value) return false;
|
||||
if (this.filters[key + '<='] !== undefined && this.filters[key + '<='].val < value) return false;
|
||||
return true;
|
||||
|
||||
case '=~':
|
||||
return true;
|
||||
|
||||
case '!=':
|
||||
if (this.filters[key + '='] !== undefined) return (this.filters[key + '='].val == value) ? false : null;
|
||||
if (this.filters[key + '!=' + value] !== undefined) return null;
|
||||
if (this.filters[key + '>'] !== undefined && this.filters[key + '>'].val >= value) return null;
|
||||
if (this.filters[key + '<'] !== undefined && this.filters[key + '<'].val <= value) return null;
|
||||
if (this.filters[key + '>='] !== undefined && this.filters[key + '>='].val > value) return null;
|
||||
if (this.filters[key + '<='] !== undefined && this.filters[key + '<='].val < value) return null;
|
||||
return true;
|
||||
|
||||
case '>':
|
||||
if (key + '=' in this.filters) {
|
||||
if (this.filters[key + '='].val <= value) {
|
||||
return false;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
if (this.filters[key + '<'] !== undefined && this.filters[key + '<'].val <= value) return false;
|
||||
if (this.filters[key + '<='] !== undefined && this.filters[key + '<='].val <= value) return false;
|
||||
if (this.filters[key + '>'] !== undefined && this.filters[key + '>'].val >= value) return null;
|
||||
if (this.filters[key + '>='] !== undefined && this.filters[key + '>='].val > value) return null;
|
||||
return true;
|
||||
|
||||
case '>=':
|
||||
if (this.filters[key + '=' ] !== undefined) return (this.filters[key + '='].val < value) ? false : null;
|
||||
if (this.filters[key + '<' ] !== undefined && this.filters[key + '<'].val <= value) return false;
|
||||
if (this.filters[key + '<='] !== undefined && this.filters[key + '<='].val < value) return false;
|
||||
if (this.filters[key + '>' ] !== undefined && this.filters[key + '>'].val >= value) return null;
|
||||
if (this.filters[key + '>='] !== undefined && this.filters[key + '>='].val >= value) return null;
|
||||
return true;
|
||||
|
||||
case '<':
|
||||
if (this.filters[key + '=' ] !== undefined) return (this.filters[key + '='].val >= value) ? false : null;
|
||||
if (this.filters[key + '>' ] !== undefined && this.filters[key + '>'].val >= value) return false;
|
||||
if (this.filters[key + '>='] !== undefined && this.filters[key + '>='].val >= value) return false;
|
||||
if (this.filters[key + '<' ] !== undefined && this.filters[key + '<'].val <= value) return null;
|
||||
if (this.filters[key + '<='] !== undefined && this.filters[key + '<='].val < value) return null;
|
||||
return true;
|
||||
|
||||
case '<=':
|
||||
if (this.filters[key + '=' ] !== undefined) return (this.filters[key + '='].val > value) ? false : null;
|
||||
if (this.filters[key + '>' ] !== undefined && this.filters[key + '>'].val >= value) return false;
|
||||
if (this.filters[key + '>='] !== undefined && this.filters[key + '>='].val > value) return false;
|
||||
if (this.filters[key + '<' ] !== undefined && this.filters[key + '<'].val <= value) return null;
|
||||
if (this.filters[key + '<='] !== undefined && this.filters[key + '<='].val <= value) return null;
|
||||
return true;
|
||||
}
|
||||
};
|
||||
|
||||
// Does the new filter constitute a conflict?
|
||||
tree.Filterset.prototype.conflict = function(filter) {
|
||||
var key = filter.key.toString(),
|
||||
value = filter.val.toString();
|
||||
|
||||
if (!isNaN(parseFloat(value))) value = parseFloat(value);
|
||||
|
||||
// if (a=b) && (a=c)
|
||||
// if (a=b) && (a!=b)
|
||||
// or (a!=b) && (a=b)
|
||||
if ((filter.op === '=' && this.filters[key + '='] !== undefined &&
|
||||
value != this.filters[key + '='].val.toString()) ||
|
||||
(filter.op === '!=' && this.filters[key + '='] !== undefined &&
|
||||
value == this.filters[key + '='].val.toString()) ||
|
||||
(filter.op === '=' && this.filters[key + '!='] !== undefined &&
|
||||
value == this.filters[key + '!='].val.toString())) {
|
||||
return filter.toString() + ' added to ' + this.toString() + ' produces an invalid filter';
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
// Only call this function for filters that have been cleared by .addable().
|
||||
tree.Filterset.prototype.add = function(filter, env) {
|
||||
var key = filter.key.toString(),
|
||||
id,
|
||||
op = filter.op,
|
||||
conflict = this.conflict(filter),
|
||||
numval;
|
||||
|
||||
if (conflict) return conflict;
|
||||
|
||||
if (op === '=') {
|
||||
for (var i in this.filters) {
|
||||
if (this.filters[i].key == key) delete this.filters[i];
|
||||
}
|
||||
this.filters[key + '='] = filter;
|
||||
} else if (op === '!=') {
|
||||
this.filters[key + '!=' + filter.val] = filter;
|
||||
} else if (op === '=~') {
|
||||
this.filters[key + '=~' + filter.val] = filter;
|
||||
} else if (op === '>') {
|
||||
// If there are other filters that are also >
|
||||
// but are less than this one, they don't matter, so
|
||||
// remove them.
|
||||
for (var j in this.filters) {
|
||||
if (this.filters[j].key == key && this.filters[j].val <= filter.val) {
|
||||
delete this.filters[j];
|
||||
}
|
||||
}
|
||||
|
||||
// Adding the other filters doesn't make this filterset invalid, but it
|
||||
// doesn't add anything to it either.
|
||||
if (!additions) return null;
|
||||
|
||||
// We can successfully add all filters. Now clone the filterset and add the
|
||||
// new rules.
|
||||
var clone = new tree.Filterset();
|
||||
|
||||
// We can add the rules that are already present without going through the
|
||||
// add function as a Filterset is always in its simplest canonical form.
|
||||
for (var id in this) {
|
||||
clone[id] = this[id];
|
||||
this.filters[key + '>'] = filter;
|
||||
} else if (op === '>=') {
|
||||
for (var k in this.filters) {
|
||||
numval = (+this.filters[k].val.toString());
|
||||
if (this.filters[k].key == key && numval < filter.val) {
|
||||
delete this.filters[k];
|
||||
}
|
||||
}
|
||||
|
||||
// Only add new filters that actually change the filter.
|
||||
while (id = additions.shift()) {
|
||||
clone.add(id);
|
||||
if (this.filters[key + '!=' + filter.val] !== undefined) {
|
||||
delete this.filters[key + '!=' + filter.val];
|
||||
filter.op = '>';
|
||||
this.filters[key + '>'] = filter;
|
||||
}
|
||||
|
||||
return clone;
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Returns true when the new filter can be added, false otherwise.
|
||||
*/
|
||||
Object.defineProperty(tree.Filterset.prototype, 'addable', {
|
||||
enumerable: false,
|
||||
value: function(filter) {
|
||||
var key = filter.key, value = filter.val;
|
||||
|
||||
switch (filter.op) {
|
||||
case '=':
|
||||
if (key + '=' in this) return (this[key + '='].val != value) ? false : null;
|
||||
if (key + '!=' + value in this) return false;
|
||||
if (key + '>' in this && this[key + '>'].val >= value) return false;
|
||||
if (key + '<' in this && this[key + '<'].val <= value) return false;
|
||||
if (key + '>=' in this && this[key + '>='].val > value) return false;
|
||||
if (key + '<=' in this && this[key + '<='].val < value) return false;
|
||||
return true;
|
||||
|
||||
case '!=':
|
||||
if (key + '=' in this) return (this[key + '='].val == value) ? false : null;
|
||||
if (key + '!=' + value in this) return null;
|
||||
if (key + '>' in this && this[key + '>'].val >= value) return null;
|
||||
if (key + '<' in this && this[key + '<'].val <= value) return null;
|
||||
if (key + '>=' in this && this[key + '>='].val > value) return null;
|
||||
if (key + '<=' in this && this[key + '<='].val < value) return null;
|
||||
return true;
|
||||
|
||||
case '>':
|
||||
if (key + '=' in this) return (this[key + '='].val <= value) ? false : null;
|
||||
if (key + '<' in this && this[key + '<'].val <= value) return false;
|
||||
if (key + '<=' in this && this[key + '<='].val <= value) return false;
|
||||
if (key + '>' in this && this[key + '>'].val >= value) return null;
|
||||
if (key + '>=' in this && this[key + '>='].val > value) return null;
|
||||
return true;
|
||||
|
||||
case '>=':
|
||||
if (key + '=' in this) return (this[key + '='].val < value) ? false : null;
|
||||
if (key + '<' in this && this[key + '<'].val <= value) return false;
|
||||
if (key + '<=' in this && this[key + '<='].val < value) return false;
|
||||
if (key + '>' in this && this[key + '>'].val >= value) return null;
|
||||
if (key + '>=' in this && this[key + '>='].val >= value) return null;
|
||||
return true;
|
||||
|
||||
case '<':
|
||||
if (key + '=' in this) return (this[key + '='].val >= value) ? false : null;
|
||||
if (key + '>' in this && this[key + '>'].val >= value) return false;
|
||||
if (key + '>=' in this && this[key + '>='].val >= value) return false;
|
||||
if (key + '<' in this && this[key + '<'].val <= value) return null;
|
||||
if (key + '<=' in this && this[key + '<='].val < value) return null;
|
||||
return true;
|
||||
|
||||
case '<=':
|
||||
if (key + '=' in this) return (this[key + '='].val > value) ? false : null;
|
||||
if (key + '>' in this && this[key + '>'].val >= value) return false;
|
||||
if (key + '>=' in this && this[key + '>='].val > value) return false;
|
||||
if (key + '<' in this && this[key + '<'].val <= value) return null;
|
||||
if (key + '<=' in this && this[key + '<='].val <= value) return null;
|
||||
return true;
|
||||
else {
|
||||
this.filters[key + '>='] = filter;
|
||||
}
|
||||
} else if (op === '<') {
|
||||
for (var l in this.filters) {
|
||||
numval = (+this.filters[l].val.toString());
|
||||
if (this.filters[l].key == key && numval >= filter.val) {
|
||||
delete this.filters[l];
|
||||
}
|
||||
}
|
||||
this.filters[key + '<'] = filter;
|
||||
} else if (op === '<=') {
|
||||
for (var m in this.filters) {
|
||||
numval = (+this.filters[m].val.toString());
|
||||
if (this.filters[m].key == key && numval > filter.val) {
|
||||
delete this.filters[m];
|
||||
}
|
||||
}
|
||||
if (this.filters[key + '!=' + filter.val] !== undefined) {
|
||||
delete this.filters[key + '!=' + filter.val];
|
||||
filter.op = '<';
|
||||
this.filters[key + '<'] = filter;
|
||||
}
|
||||
else {
|
||||
this.filters[key + '<='] = filter;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Only call this function for filters that have been cleared by .addable().
|
||||
*/
|
||||
Object.defineProperty(tree.Filterset.prototype, 'add', {
|
||||
enumerable: false,
|
||||
value: function(filter) {
|
||||
var key = filter.key;
|
||||
|
||||
switch (filter.op) {
|
||||
case '=':
|
||||
for (var id in this) {
|
||||
if (this[id].key == key) {
|
||||
delete this[id];
|
||||
}
|
||||
}
|
||||
this[key + '='] = filter;
|
||||
break;
|
||||
|
||||
case '!=':
|
||||
this[key + '!=' + filter.val] = filter;
|
||||
break;
|
||||
|
||||
case '=~':
|
||||
this[key + '=~' + filter.val] = filter;
|
||||
break;
|
||||
|
||||
case '>':
|
||||
// If there are other filters that are also >
|
||||
// but are less than this one, they don't matter, so
|
||||
// remove them.
|
||||
for (var id in this) {
|
||||
if (this[id].key == key && this[id].val <= filter.val) {
|
||||
delete this[id];
|
||||
}
|
||||
}
|
||||
this[key + '>'] = filter;
|
||||
break;
|
||||
|
||||
case '>=':
|
||||
for (var id in this) {
|
||||
if (this[id].key == key && this[id].val < filter.val) {
|
||||
delete this[id];
|
||||
}
|
||||
}
|
||||
if (key + '!=' + filter.val in this) {
|
||||
delete this[key + '!=' + filter.val];
|
||||
filter.op = '>';
|
||||
this[key + '>'] = filter;
|
||||
}
|
||||
else {
|
||||
this[key + '>='] = filter;
|
||||
}
|
||||
break;
|
||||
|
||||
case '<':
|
||||
for (var id in this) {
|
||||
if (this[id].key == key && this[id].val >= filter.val) {
|
||||
delete this[id];
|
||||
}
|
||||
}
|
||||
this[key + '<'] = filter;
|
||||
break;
|
||||
|
||||
case '<=':
|
||||
for (var id in this) {
|
||||
if (this[id].key == key && this[id].val > filter.val) {
|
||||
delete this[id];
|
||||
}
|
||||
}
|
||||
if (key + '!=' + filter.val in this) {
|
||||
delete this[key + '!=' + filter.val];
|
||||
filter.op = '<';
|
||||
this[key + '<'] = filter;
|
||||
}
|
||||
else {
|
||||
this[key + '<='] = filter;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@ -1,23 +1,16 @@
|
||||
(function(tree) {
|
||||
|
||||
tree._getFontSet = function(env, fonts) {
|
||||
var find_existing = function(fonts) {
|
||||
var findFonts = fonts.join('');
|
||||
for (var i = 0; i < env.effects.length; i++) {
|
||||
if (findFonts == env.effects[i].fonts.join('')) {
|
||||
return env.effects[i];
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
var existing = false;
|
||||
if (existing = find_existing(fonts)) {
|
||||
return existing;
|
||||
} else {
|
||||
var new_fontset = new tree.FontSet(env, fonts);
|
||||
env.effects.push(new_fontset);
|
||||
return new_fontset;
|
||||
var fontKey = fonts.join('');
|
||||
if (env._fontMap && env._fontMap[fontKey]) {
|
||||
return env._fontMap[fontKey];
|
||||
}
|
||||
|
||||
var new_fontset = new tree.FontSet(env, fonts);
|
||||
env.effects.push(new_fontset);
|
||||
if (!env._fontMap) env._fontMap = {};
|
||||
env._fontMap[fontKey] = new_fontset;
|
||||
return new_fontset;
|
||||
};
|
||||
|
||||
tree.FontSet = function FontSet(env, fonts) {
|
||||
|
@ -1,18 +1,17 @@
|
||||
(function(tree) {
|
||||
//
|
||||
// RGB Colors - #ff0014, #eee
|
||||
//
|
||||
|
||||
tree.ImageFilter = function ImageFilter(filter, args) {
|
||||
this.is = 'imagefilter';
|
||||
this.filter = filter;
|
||||
this.args = args || null;
|
||||
};
|
||||
|
||||
tree.ImageFilter.prototype = {
|
||||
eval: function() { return this; },
|
||||
is: 'imagefilter',
|
||||
ev: function() { return this; },
|
||||
|
||||
toString: function() {
|
||||
if (this.args) {
|
||||
return this.filter + ':' + this.args.join(',');
|
||||
return this.filter + '(' + this.args.join(',') + ')';
|
||||
} else {
|
||||
return this.filter;
|
||||
}
|
||||
|
@ -6,7 +6,9 @@ tree.Invalid = function Invalid(chunk, index, message) {
|
||||
this.message = message || "Invalid code: " + this.chunk;
|
||||
};
|
||||
|
||||
tree.Invalid.prototype.eval = function(env) {
|
||||
tree.Invalid.prototype.is = 'invalid';
|
||||
|
||||
tree.Invalid.prototype.ev = function(env) {
|
||||
env.error({
|
||||
chunk: this.chunk,
|
||||
index: this.index,
|
||||
|
@ -10,7 +10,7 @@ tree.Keyword = function Keyword(value) {
|
||||
this.is = special[value] ? special[value] : 'keyword';
|
||||
};
|
||||
tree.Keyword.prototype = {
|
||||
eval: function() { return this; },
|
||||
ev: function() { return this; },
|
||||
toString: function() { return this.value; }
|
||||
};
|
||||
|
||||
|
@ -1,37 +1,36 @@
|
||||
(function(tree) {
|
||||
|
||||
tree.Layer = function Layer(obj) {
|
||||
this.name = obj.name;
|
||||
this.status = obj.status;
|
||||
this.styles = obj.styles;
|
||||
this.properties = obj.properties || {};
|
||||
this.srs = obj.srs;
|
||||
this.datasource = obj.Datasource;
|
||||
};
|
||||
|
||||
tree.Layer.prototype.toXML = function() {
|
||||
tree.LayerXML = function(obj, styles) {
|
||||
var dsoptions = [];
|
||||
for (var i in this.datasource) {
|
||||
for (var i in obj.Datasource) {
|
||||
dsoptions.push('<Parameter name="' + i + '"><![CDATA[' +
|
||||
this.datasource[i] + ']]></Parameter>');
|
||||
obj.Datasource[i] + ']]></Parameter>');
|
||||
}
|
||||
|
||||
var prop_string = '';
|
||||
for (var i in this.properties) {
|
||||
prop_string += ' ' + i + '="' + this.properties[i] + '"\n';
|
||||
for (var prop in obj.properties) {
|
||||
if (prop === 'minzoom') {
|
||||
prop_string += ' maxzoom="' + tree.Zoom.ranges[obj.properties[prop]] + '"\n';
|
||||
} else if (prop === 'maxzoom') {
|
||||
prop_string += ' minzoom="' + tree.Zoom.ranges[obj.properties[prop]+1] + '"\n';
|
||||
} else {
|
||||
prop_string += ' ' + prop + '="' + obj.properties[prop] + '"\n';
|
||||
}
|
||||
}
|
||||
|
||||
return '<Layer' +
|
||||
' name="' + this.name + '"\n' +
|
||||
' name="' + obj.name + '"\n' +
|
||||
prop_string +
|
||||
((typeof this.status === 'undefined') ? '' : ' status="' + this.status + '"\n') +
|
||||
' srs="' + this.srs + '">\n ' +
|
||||
this.styles.reverse().map(function(s) {
|
||||
((typeof obj.status === 'undefined') ? '' : ' status="' + obj.status + '"\n') +
|
||||
((typeof obj.srs === 'undefined') ? '' : ' srs="' + obj.srs + '"') + '>\n ' +
|
||||
styles.reverse().map(function(s) {
|
||||
return '<StyleName>' + s + '</StyleName>';
|
||||
}).join('\n ') +
|
||||
(dsoptions.length ?
|
||||
'\n <Datasource>\n ' +
|
||||
dsoptions.join('\n ') +
|
||||
'\n </Datasource>\n' +
|
||||
'\n </Datasource>\n'
|
||||
: '') +
|
||||
' </Layer>\n';
|
||||
};
|
||||
|
||||
|
@ -12,7 +12,7 @@ tree.Literal.prototype = {
|
||||
toString: function() {
|
||||
return this.value;
|
||||
},
|
||||
'eval': function() {
|
||||
'ev': function() {
|
||||
return this;
|
||||
}
|
||||
};
|
||||
|
@ -1,15 +1,18 @@
// An operation is an expression with an op in between two operands,
// like 2 + 1.
(function(tree) {

tree.Operation = function Operation(op, operands, index) {
this.op = op.trim();
this.operands = operands;
this.index = index;
this.is = 'operation';
};

tree.Operation.prototype.eval = function(env) {
var a = this.operands[0].eval(env),
b = this.operands[1].eval(env),
tree.Operation.prototype.is = 'operation';

tree.Operation.prototype.ev = function(env) {
var a = this.operands[0].ev(env),
b = this.operands[1].ev(env),
temp;

if (a.is === 'undefined' || b.is === 'undefined') {
@ -61,11 +64,24 @@ tree.Operation.prototype.eval = function(env) {
value: 'undefined'
};
} else {
return new tree.Literal(a.eval(env).toString(true) + this.op + b.eval(env).toString(true));
return new tree.Literal(a.ev(env).toString(true) + this.op + b.ev(env).toString(true));
}
}

return a.operate(this.op, b);
if (a.operate === undefined) {
env.error({
message: 'Cannot do math with type ' + a.is + '.',
index: this.index,
type: 'runtime',
filename: this.filename
});
return {
is: 'undefined',
value: 'undefined'
};
}

return a.operate(env, this.op, b);
};

tree.operate = function(op, a, b) {
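// A rough usage sketch of the ev()/operate() contract above, not part of this commit;
// it follows the pattern of the test files added later in this diff, and the relative
// require paths plus the stub env are assumptions.
var tree = require('../lib/carto/tree.js');
require('../lib/carto/tree/operation');
require('../lib/carto/tree/dimension');

var env = { error: function(err) { throw new Error(err.message); } };
// 1 + 2 routes through the operand's operate(env, '+', b) and yields a numeric node of 3.
var sum = new tree.Operation('+', [new tree.Dimension(1), new tree.Dimension(2)], 0).ev(env);
// A 'keyword' operand has no operate(), so ev() would instead report
// 'Cannot do math with type keyword.' through env.error().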
@ -2,22 +2,28 @@

tree.Quoted = function Quoted(content) {
this.value = content || '';
this.is = 'string';
};

tree.Quoted.prototype = {
is: 'string',

toString: function(quotes) {
var xmlvalue = this.value.replace(/\'/g, '&apos;');
return (quotes === true) ? "'" + xmlvalue + "'" : this.value;
var escapedValue = this.value
.replace(/&/g, '&amp;');
var xmlvalue = escapedValue
.replace(/\'/g, '\\\'')
.replace(/\"/g, '&quot;')
.replace(/</g, '&lt;')
.replace(/\>/g, '&gt;');
return (quotes === true) ? "'" + xmlvalue + "'" : escapedValue;
},

'eval': function() {
'ev': function() {
return this;
},

operate: function(op, other) {
return new tree.Quoted(true,
tree.operate(op, this.toString(), other.toString(this.contains_field)));
operate: function(env, op, other) {
return new tree.Quoted(tree.operate(op, this.toString(), other.toString(this.contains_field)));
}
};

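// Example of the escaping above (illustrative input, not from the source): with the
// &-escape applied first,
//   new tree.Quoted("it's & <b>").toString(true)
// yields the XML-safe quoted string 'it\'s &amp; &lt;b&gt;', while toString() without
// quotes returns "it's &amp; <b>" with only the ampersand escaped.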
@ -1,14 +1,12 @@
|
||||
/*
|
||||
* Carto pulls in a reference from the `mapnik-reference`
|
||||
* module. This file builds indexes from that file for its various
|
||||
* options, and provides validation methods for property: value
|
||||
* combinations.
|
||||
*/
|
||||
// Carto pulls in a reference from the `mapnik-reference`
|
||||
// module. This file builds indexes from that file for its various
|
||||
// options, and provides validation methods for property: value
|
||||
// combinations.
|
||||
(function(tree) {
|
||||
|
||||
|
||||
var _ = require('underscore');
|
||||
var reference = require('mapnik-reference');
|
||||
var _ = require('underscore'),
|
||||
reference = require('mapnik-reference'),
|
||||
ref = {};
|
||||
|
||||
tree.Reference = {
|
||||
data: reference.version.latest
|
||||
@ -19,11 +17,21 @@ tree.Reference.set = function(ref, version) {
|
||||
tree.Reference.setVersion(version || 'latest');
|
||||
};
|
||||
|
||||
tree.Reference.setVersion = function(version) {
|
||||
tree.Reference.data = reference.version[version];
|
||||
ref.setData = function(data) {
|
||||
ref.data = data;
|
||||
ref.selector_cache = generateSelectorCache(data);
|
||||
ref.mapnikFunctions = generateMapnikFunctions(data);
|
||||
ref.required_cache = generateRequiredProperties(data);
|
||||
};
|
||||
|
||||
tree.Reference.required_prop_list_cache = {};
|
||||
ref.setVersion = function(version) {
|
||||
if (reference.version.hasOwnProperty(version)) {
|
||||
ref.setData(reference.version[version]);
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
};
|
||||
|
||||
tree.Reference.selectors = tree.Reference.selectors || (function() {
|
||||
var list = [];
|
||||
@ -34,76 +42,59 @@ tree.Reference.selectors = tree.Reference.selectors || (function() {
|
||||
}
|
||||
}
|
||||
}
|
||||
return list;
|
||||
})();
|
||||
});
|
||||
|
||||
tree.Reference.validSelector = function(selector) {
|
||||
return tree.Reference.selectors.indexOf(selector) !== -1;
|
||||
ref.selectorData = function(selector, i) {
|
||||
if (ref.selector_cache[selector]) return ref.selector_cache[selector][i];
|
||||
};
|
||||
|
||||
tree.Reference.selectorName = function(selector) {
|
||||
for (var i in tree.Reference.data.symbolizers) {
|
||||
for (var j in tree.Reference.data.symbolizers[i]) {
|
||||
if (selector == tree.Reference.data.symbolizers[i][j].css) {
|
||||
return j;
|
||||
ref.validSelector = function(selector) { return !!ref.selector_cache[selector]; };
|
||||
ref.selectorName = function(selector) { return ref.selectorData(selector, 2); };
|
||||
ref.selector = function(selector) { return ref.selectorData(selector, 0); };
|
||||
ref.symbolizer = function(selector) { return ref.selectorData(selector, 1); };
|
||||
|
||||
function generateSelectorCache(data) {
|
||||
var index = {};
|
||||
for (var i in data.symbolizers) {
|
||||
for (var j in data.symbolizers[i]) {
|
||||
if (data.symbolizers[i][j].hasOwnProperty('css')) {
|
||||
index[data.symbolizers[i][j].css] = [data.symbolizers[i][j], i, j];
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
return index;
|
||||
}
|
||||
|
||||
tree.Reference.selector = function(selector) {
|
||||
for (var i in tree.Reference.data.symbolizers) {
|
||||
for (var j in tree.Reference.data.symbolizers[i]) {
|
||||
if (selector == tree.Reference.data.symbolizers[i][j].css) {
|
||||
return tree.Reference.data.symbolizers[i][j];
|
||||
function generateMapnikFunctions(data) {
|
||||
var functions = {};
|
||||
for (var i in data.symbolizers) {
|
||||
for (var j in data.symbolizers[i]) {
|
||||
if (data.symbolizers[i][j].type === 'functions') {
|
||||
for (var k = 0; k < data.symbolizers[i][j].functions.length; k++) {
|
||||
var fn = data.symbolizers[i][j].functions[k];
|
||||
functions[fn[0]] = fn[1];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
return functions;
|
||||
}
|
||||
|
||||
tree.Reference.symbolizer = function(selector) {
|
||||
for (var i in tree.Reference.data.symbolizers) {
|
||||
for (var j in tree.Reference.data.symbolizers[i]) {
|
||||
if (selector == tree.Reference.data.symbolizers[i][j].css) {
|
||||
return i;
|
||||
function generateRequiredProperties(data) {
|
||||
var cache = {};
|
||||
for (var symbolizer_name in data.symbolizers) {
|
||||
cache[symbolizer_name] = [];
|
||||
for (var j in data.symbolizers[symbolizer_name]) {
|
||||
if (data.symbolizers[symbolizer_name][j].required) {
|
||||
cache[symbolizer_name].push(data.symbolizers[symbolizer_name][j].css);
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
return cache;
|
||||
}
|
||||
|
||||
/*
|
||||
* For transform properties and image-filters,
|
||||
* mapnik has its own functions.
|
||||
*/
|
||||
tree.Reference.mapnikFunction = function(name) {
|
||||
var functions = [];
|
||||
for (var i in tree.Reference.data.symbolizers) {
|
||||
for (var j in tree.Reference.data.symbolizers[i]) {
|
||||
if (tree.Reference.data.symbolizers[i][j].type === 'functions') {
|
||||
functions = functions.concat(tree.Reference.data.symbolizers[i][j].functions);
|
||||
}
|
||||
}
|
||||
}
|
||||
return _.find(functions, function(f) {
|
||||
return f[0] === name;
|
||||
});
|
||||
};
|
||||
|
||||
tree.Reference.requiredPropertyList = function(symbolizer_name) {
|
||||
if (this.required_prop_list_cache[symbolizer_name]) {
|
||||
return this.required_prop_list_cache[symbolizer_name];
|
||||
}
|
||||
var properties = [];
|
||||
for (var j in tree.Reference.data.symbolizers[symbolizer_name]) {
|
||||
if (tree.Reference.data.symbolizers[symbolizer_name][j].required) {
|
||||
properties.push(tree.Reference.data.symbolizers[symbolizer_name][j].css);
|
||||
}
|
||||
}
|
||||
return this.required_prop_list_cache[symbolizer_name] = properties;
|
||||
};
|
||||
|
||||
tree.Reference.requiredProperties = function(symbolizer_name, rules) {
|
||||
var req = tree.Reference.requiredPropertyList(symbolizer_name);
|
||||
ref.requiredProperties = function(symbolizer_name, rules) {
|
||||
var req = ref.required_cache[symbolizer_name];
|
||||
for (var i in req) {
|
||||
if (!(req[i] in rules)) {
|
||||
return 'Property ' + req[i] + ' required for defining ' +
|
||||
@ -112,10 +103,8 @@ tree.Reference.requiredProperties = function(symbolizer_name, rules) {
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* TODO: finish implementation - this is dead code
|
||||
*/
|
||||
tree.Reference._validateValue = {
|
||||
// TODO: finish implementation - this is dead code
|
||||
ref._validateValue = {
|
||||
'font': function(env, value) {
|
||||
if (env.validation_data && env.validation_data.fonts) {
|
||||
return env.validation_data.fonts.indexOf(value) != -1;
|
||||
@ -125,52 +114,98 @@ tree.Reference._validateValue = {
|
||||
}
|
||||
};
|
||||
|
||||
tree.Reference.isFont = function(selector) {
|
||||
return tree.Reference.selector(selector).validate == 'font';
|
||||
ref.isFont = function(selector) {
|
||||
return ref.selector(selector).validate == 'font';
|
||||
};
|
||||
|
||||
tree.Reference.validValue = function(env, selector, value) {
|
||||
// https://gist.github.com/982927
|
||||
ref.editDistance = function(a, b){
|
||||
if (a.length === 0) return b.length;
|
||||
if (b.length === 0) return a.length;
|
||||
var matrix = [];
|
||||
for (var i = 0; i <= b.length; i++) { matrix[i] = [i]; }
|
||||
for (var j = 0; j <= a.length; j++) { matrix[0][j] = j; }
|
||||
for (i = 1; i <= b.length; i++) {
|
||||
for (j = 1; j <= a.length; j++) {
|
||||
if (b.charAt(i-1) == a.charAt(j-1)) {
|
||||
matrix[i][j] = matrix[i-1][j-1];
|
||||
} else {
|
||||
matrix[i][j] = Math.min(matrix[i-1][j-1] + 1, // substitution
|
||||
Math.min(matrix[i][j-1] + 1, // insertion
|
||||
matrix[i-1][j] + 1)); // deletion
|
||||
}
|
||||
}
|
||||
}
|
||||
return matrix[b.length][a.length];
|
||||
};
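// Quick check of the Levenshtein helper above (illustrative, assuming the usual
// lib/carto/tree layout exposes it as tree.Reference.editDistance):
//   tree.Reference.editDistance('polygonopacity', 'polygon-opacity') === 1
// which is what lets rule.js suggest "Did you mean polygon-opacity?" for property
// names that are fewer than three edits away from a known one.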
|
||||
|
||||
function validateFunctions(value, selector) {
|
||||
if (value.value[0].is === 'string') return true;
|
||||
for (var i in value.value) {
|
||||
for (var j in value.value[i].value) {
|
||||
if (value.value[i].value[j].is !== 'call') return false;
|
||||
var f = _.find(ref
|
||||
.selector(selector).functions, function(x) {
|
||||
return x[0] == value.value[i].value[j].name;
|
||||
});
|
||||
if (!(f && f[1] == -1)) {
|
||||
// This filter is unknown or given an incorrect number of arguments
|
||||
if (!f || f[1] !== value.value[i].value[j].args.length) return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
function validateKeyword(value, selector) {
|
||||
if (typeof ref.selector(selector).type === 'object') {
|
||||
return ref.selector(selector).type
|
||||
.indexOf(value.value[0].value) !== -1;
|
||||
} else {
|
||||
// allow unquoted keywords as strings
|
||||
return ref.selector(selector).type === 'string';
|
||||
}
|
||||
}
|
||||
|
||||
ref.validValue = function(env, selector, value) {
|
||||
var i, j;
|
||||
// TODO: handle in reusable way
|
||||
if (!tree.Reference.selector(selector)) {
|
||||
if (!ref.selector(selector)) {
|
||||
return false;
|
||||
} else if (value.value[0].is == 'keyword') {
|
||||
return tree.Reference
|
||||
.selector(selector).type
|
||||
.indexOf(value.value[0].value) !== -1;
|
||||
return validateKeyword(value, selector);
|
||||
} else if (value.value[0].is == 'undefined') {
|
||||
// caught earlier in the chain - ignore here so that
|
||||
// error is not overridden
|
||||
return true;
|
||||
} else if (tree.Reference.selector(selector).type == 'numbers') {
|
||||
} else if (ref.selector(selector).type == 'numbers') {
|
||||
for (i in value.value) {
|
||||
if (value.value[i].is !== 'float') {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
} else if (tree.Reference.selector(selector).type == 'functions') {
|
||||
} else if (ref.selector(selector).type == 'tags') {
|
||||
if (!value.value) return false;
|
||||
if (!value.value[0].value) {
|
||||
return value.value[0].is === 'tag';
|
||||
}
|
||||
for (i = 0; i < value.value[0].value.length; i++) {
|
||||
if (value.value[0].value[i].is !== 'tag') return false;
|
||||
}
|
||||
return true;
|
||||
} else if (ref.selector(selector).type == 'functions') {
|
||||
// For backwards compatibility, you can specify a string for `functions`-compatible
|
||||
// values, though they will not be validated.
|
||||
if (value.value[0].is === 'string') {
|
||||
return validateFunctions(value, selector);
|
||||
} else if (ref.selector(selector).type === 'expression') {
|
||||
return true;
|
||||
} else if (ref.selector(selector).type === 'unsigned') {
|
||||
if (value.value[0].is === 'float') {
|
||||
value.value[0].round();
|
||||
return true;
|
||||
} else {
|
||||
for (i in value.value) {
|
||||
for (j in value.value[i].value) {
|
||||
if (value.value[i].value[j].is !== 'call') {
|
||||
return false;
|
||||
}
|
||||
var f = _.find(tree.Reference
|
||||
.selector(selector).functions, function(x) {
|
||||
return x[0] == value.value[i].value[j].name;
|
||||
});
|
||||
// This filter is unknown
|
||||
if (!f) return false;
|
||||
// The filter has been given an incorrect number of arguments
|
||||
if (f[1] !== value.value[i].value[j].args.length) return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
return false;
|
||||
}
|
||||
} else if (tree.Reference.selector(selector).type == 'expression') {
|
||||
return true;
|
||||
@ -182,22 +217,26 @@ tree.Reference.validValue = function(env, selector, value) {
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
if (tree.Reference.selector(selector).validate) {
|
||||
if (ref.selector(selector).validate) {
|
||||
var valid = false;
|
||||
for (i = 0; i < value.value.length; i++) {
|
||||
if (tree.Reference.selector(selector).type == value.value[i].is &&
|
||||
tree.Reference
|
||||
if (ref.selector(selector).type == value.value[i].is &&
|
||||
ref
|
||||
._validateValue
|
||||
[tree.Reference.selector(selector).validate]
|
||||
[ref.selector(selector).validate]
|
||||
(env, value.value[i].value)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return valid;
|
||||
} else {
|
||||
return tree.Reference.selector(selector).type == value.value[0].is;
|
||||
return ref.selector(selector).type == value.value[0].is;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
ref.setVersion('latest');
|
||||
|
||||
tree.Reference = ref;
|
||||
|
||||
})(require('../tree'));
|
||||
|
@ -1,4 +1,7 @@
|
||||
(function(tree) {
|
||||
// a rule is a single property and value combination, or variable
|
||||
// name and value combination, like
|
||||
// polygon-opacity: 1.0; or @opacity: 1.0;
|
||||
tree.Rule = function Rule(name, value, index, filename) {
|
||||
var parts = name.split('/');
|
||||
this.name = parts.pop();
|
||||
@ -11,6 +14,8 @@ tree.Rule = function Rule(name, value, index, filename) {
|
||||
this.variable = (name.charAt(0) === '@');
|
||||
};
|
||||
|
||||
tree.Rule.prototype.is = 'rule';
|
||||
|
||||
tree.Rule.prototype.clone = function() {
|
||||
var clone = Object.create(tree.Rule.prototype);
|
||||
clone.name = this.name;
|
||||
@ -24,21 +29,32 @@ tree.Rule.prototype.clone = function() {
|
||||
};
|
||||
|
||||
tree.Rule.prototype.updateID = function() {
|
||||
return this.id = this.zoom + '#' + this.name;
|
||||
return this.id = this.zoom + '#' + this.instance + '#' + this.name;
|
||||
};
|
||||
|
||||
tree.Rule.prototype.toString = function() {
|
||||
return '[' + tree.Zoom.toString(this.zoom) + '] ' + this.name + ': ' + this.value;
|
||||
};
|
||||
|
||||
function getMean(name) {
|
||||
return Object.keys(tree.Reference.selector_cache).map(function(f) {
|
||||
return [f, tree.Reference.editDistance(name, f)];
|
||||
}).sort(function(a, b) { return a[1] - b[1]; });
|
||||
}
|
||||
|
||||
// second argument, if true, outputs the value of this
|
||||
// rule without the usual attribute="content" wrapping. Right
|
||||
// now this is just for the TextSymbolizer, but applies to other
|
||||
// properties in reference.json which specify serialization=content
|
||||
tree.Rule.prototype.toXML = function(env, content, sep, format) {
|
||||
if (!tree.Reference.validSelector(this.name)) {
|
||||
var mean = getMean(this.name);
|
||||
var mean_message = '';
|
||||
if (mean[0][1] < 3) {
|
||||
mean_message = '. Did you mean ' + mean[0][0] + '?';
|
||||
}
|
||||
return env.error({
|
||||
message: "Unrecognized rule: " + this.name,
|
||||
message: "Unrecognized rule: " + this.name + mean_message,
|
||||
index: this.index,
|
||||
type: 'syntax',
|
||||
filename: this.filename
|
||||
@ -56,13 +72,20 @@ tree.Rule.prototype.toXML = function(env, content, sep, format) {
|
||||
filename: this.filename
|
||||
});
|
||||
} else {
|
||||
var typename;
|
||||
if (tree.Reference.selector(this.name).validate) {
|
||||
typename = tree.Reference.selector(this.name).validate;
|
||||
} else if (typeof tree.Reference.selector(this.name).type === 'object') {
|
||||
typename = 'keyword (options: ' + tree.Reference.selector(this.name).type.join(', ') + ')';
|
||||
} else {
|
||||
typename = tree.Reference.selector(this.name).type;
|
||||
}
|
||||
return env.error({
|
||||
message: 'Invalid value for ' +
|
||||
this.name +
|
||||
', a valid ' +
|
||||
(tree.Reference.selector(this.name).validate ||
|
||||
tree.Reference.selector(this.name).type) +
|
||||
', the type ' + typename +
|
||||
' is expected. ' + this.value +
|
||||
' (of type ' + this.value.value[0].is + ') ' +
|
||||
' was given.',
|
||||
index: this.index,
|
||||
type: 'syntax',
|
||||
@ -86,12 +109,10 @@ tree.Rule.prototype.toXML = function(env, content, sep, format) {
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* TODO: Rule eval chain should add fontsets to env.frames
|
||||
*/
|
||||
tree.Rule.prototype['eval'] = function(context) {
|
||||
// TODO: Rule ev chain should add fontsets to env.frames
|
||||
tree.Rule.prototype.ev = function(context) {
|
||||
return new tree.Rule(this.name,
|
||||
this.value['eval'](context),
|
||||
this.value.ev(context),
|
||||
this.index,
|
||||
this.filename);
|
||||
};
|
||||
|
@ -7,27 +7,19 @@ tree.Ruleset = function Ruleset(selectors, rules) {
|
||||
this._lookups = {};
|
||||
};
|
||||
tree.Ruleset.prototype = {
|
||||
eval: function(env) {
|
||||
var ruleset = new tree.Ruleset(this.selectors, this.rules.slice(0));
|
||||
is: 'ruleset',
|
||||
'ev': function(env) {
|
||||
var i,
|
||||
ruleset = new tree.Ruleset(this.selectors, this.rules.slice(0));
|
||||
ruleset.root = this.root;
|
||||
|
||||
// push the current ruleset to the frames stack
|
||||
env.frames.unshift(ruleset);
|
||||
|
||||
// Evaluate imports
|
||||
if (ruleset.root) {
|
||||
for (var i = 0; i < ruleset.rules.length; i++) {
|
||||
if (ruleset.rules[i] instanceof tree.Import) {
|
||||
Array.prototype.splice
|
||||
.apply(ruleset.rules, [i, 1].concat(ruleset.rules[i].eval(env)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Evaluate everything else
|
||||
for (var i = 0, rule; i < ruleset.rules.length; i++) {
|
||||
for (i = 0, rule; i < ruleset.rules.length; i++) {
|
||||
rule = ruleset.rules[i];
|
||||
ruleset.rules[i] = rule.eval ? rule.eval(env) : rule;
|
||||
ruleset.rules[i] = rule.ev ? rule.ev(env) : rule;
|
||||
}
|
||||
|
||||
// Pop the stack
|
||||
@ -52,14 +44,6 @@ tree.Ruleset.prototype = {
|
||||
variable: function(name) {
|
||||
return this.variables()[name];
|
||||
},
|
||||
/**
|
||||
* Extend this rule by adding rules from another ruleset
|
||||
*
|
||||
* Currently this is designed to accept less specific
|
||||
* rules and add their values only if this ruleset doesn't
|
||||
* contain them.
|
||||
*/
|
||||
|
||||
rulesets: function() {
|
||||
if (this._rulesets) { return this._rulesets; }
|
||||
else {
|
||||
@ -78,7 +62,8 @@ tree.Ruleset.prototype = {
|
||||
this.rulesets().forEach(function(rule) {
|
||||
if (rule !== self) {
|
||||
for (var j = 0; j < rule.selectors.length; j++) {
|
||||
if (match = selector.match(rule.selectors[j])) {
|
||||
match = selector.match(rule.selectors[j]);
|
||||
if (match) {
|
||||
if (selector.elements.length > 1) {
|
||||
Array.prototype.push.apply(rules, rule.find(
|
||||
new tree.Selector(null, null, null, selector.elements.slice(1)), self));
|
||||
@ -92,19 +77,35 @@ tree.Ruleset.prototype = {
|
||||
});
|
||||
return this._lookups[key] = rules;
|
||||
},
|
||||
// Zooms can use variables. This replaces tree.Zoom objects on selectors
|
||||
// with simple bit-arrays that we can compare easily.
|
||||
evZooms: function(env) {
|
||||
for (var i = 0; i < this.selectors.length; i++) {
|
||||
var zval = tree.Zoom.all;
|
||||
for (var z = 0; z < this.selectors[i].zoom.length; z++) {
|
||||
zval = zval & this.selectors[i].zoom[z].ev(env).zoom;
|
||||
}
|
||||
this.selectors[i].zoom = zval;
|
||||
}
|
||||
},
|
||||
flatten: function(result, parents, env) {
|
||||
var selectors = [];
|
||||
var selectors = [], i, j;
|
||||
if (this.selectors.length === 0) {
|
||||
env.frames = env.frames.concat(this.rules);
|
||||
}
|
||||
for (var i = 0; i < this.selectors.length; i++) {
|
||||
// evaluate zoom variables on this object.
|
||||
this.evZooms(env);
|
||||
for (i = 0; i < this.selectors.length; i++) {
|
||||
var child = this.selectors[i];
|
||||
|
||||
// This is an invalid filterset.
|
||||
if (!child.filters) continue;
|
||||
if (!child.filters) {
|
||||
// TODO: is this internal inconsistency?
|
||||
// This is an invalid filterset.
|
||||
continue;
|
||||
}
|
||||
|
||||
if (parents.length) {
|
||||
for (var j = 0; j < parents.length; j++) {
|
||||
for (j = 0; j < parents.length; j++) {
|
||||
var parent = parents[j];
|
||||
|
||||
var mergedFilters = parent.filters.cloneWith(child.filters);
|
||||
@ -114,7 +115,9 @@ tree.Ruleset.prototype = {
|
||||
// the zoom levels or the attachment is different too.
|
||||
if (parent.zoom === (parent.zoom & child.zoom) &&
|
||||
parent.frame_offset === child.frame_offset &&
|
||||
parent.attachment === child.attachment) {
|
||||
parent.attachment === child.attachment &&
|
||||
parent.elements.join() === child.elements.join()) {
|
||||
selectors.push(parent);
|
||||
continue;
|
||||
} else {
|
||||
mergedFilters = parent.filters;
|
||||
@ -144,9 +147,10 @@ tree.Ruleset.prototype = {
|
||||
}
|
||||
|
||||
var rules = [];
|
||||
for (var i = 0; i < this.rules.length; i++) {
|
||||
for (i = 0; i < this.rules.length; i++) {
|
||||
var rule = this.rules[i];
|
||||
|
||||
// Recursively flatten any nested rulesets
|
||||
if (rule instanceof tree.Ruleset) {
|
||||
rule.flatten(result, selectors, env);
|
||||
} else if (rule instanceof tree.Rule) {
|
||||
@ -157,7 +161,7 @@ tree.Ruleset.prototype = {
|
||||
}
|
||||
|
||||
var index = rules.length ? rules[0].index : false;
|
||||
for (var i = 0; i < selectors.length; i++) {
|
||||
for (i = 0; i < selectors.length; i++) {
|
||||
// For specificity sort, use the position of the first rule to allow
|
||||
// defining attachments that are under current element as a descendant
|
||||
// selector.
|
||||
|
@ -1,5 +1,3 @@
var assert = require('assert');

(function(tree) {

tree.Selector = function Selector(filters, zoom, frame_offset, elements, attachment, conditions, index) {
@ -12,13 +10,11 @@ tree.Selector = function Selector(filters, zoom, frame_offset, elements, attachm
this.index = index;
};

/**
* Determine the specificity of this selector
* based on the specificity of its elements - calling
* Element.specificity() in order to do so
*
* [ID, Class, Filters, Position in document]
*/
// Determine the specificity of this selector
// based on the specificity of its elements - calling
// Element.specificity() in order to do so
//
// [ID, Class, Filters, Position in document]
tree.Selector.prototype.specificity = function() {
return this.elements.reduce(function(memo, e) {
var spec = e.specificity();
@ -1,54 +1,68 @@
|
||||
(function(tree) {
|
||||
var _ = require('underscore');
|
||||
|
||||
tree.Style = function Style(name, attachment, definitions) {
|
||||
this.attachment = attachment;
|
||||
this.definitions = definitions;
|
||||
this.name = name + (attachment !== '__default__' ? '-' + attachment : '');
|
||||
};
|
||||
|
||||
tree.Style.prototype.toXML = function(env) {
|
||||
// Given a style's name, attachment, definitions, and an environment object,
|
||||
// return a stringified style for Mapnik
|
||||
tree.StyleXML = function(name, attachment, definitions, env) {
|
||||
var existing = {};
|
||||
var image_filters = [], image_filters_inflate = [], direct_image_filters = [], comp_op = [], opacity = [];
|
||||
|
||||
var image_filters = _.flatten(this.definitions.map(function(definition) {
|
||||
return definition.rules.filter(function(rule) {
|
||||
return (rule.name === 'image-filters');
|
||||
});
|
||||
}));
|
||||
for (var i = 0; i < definitions.length; i++) {
|
||||
for (var j = 0; j < definitions[i].rules.length; j++) {
|
||||
if (definitions[i].rules[j].name === 'image-filters') {
|
||||
image_filters.push(definitions[i].rules[j]);
|
||||
}
|
||||
if (definitions[i].rules[j].name === 'image-filters-inflate') {
|
||||
image_filters_inflate.push(definitions[i].rules[j]);
|
||||
}
|
||||
if (definitions[i].rules[j].name === 'direct-image-filters') {
|
||||
direct_image_filters.push(definitions[i].rules[j]);
|
||||
}
|
||||
if (definitions[i].rules[j].name === 'comp-op') {
|
||||
comp_op.push(definitions[i].rules[j]);
|
||||
}
|
||||
if (definitions[i].rules[j].name === 'opacity') {
|
||||
opacity.push(definitions[i].rules[j]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var comp_op = _.flatten(this.definitions.map(function(definition) {
|
||||
return definition.rules.filter(function(rule) {
|
||||
return (rule.name === 'composite-operation');
|
||||
});
|
||||
}));
|
||||
|
||||
var opacity = _.flatten(this.definitions.map(function(definition) {
|
||||
return definition.rules.filter(function(rule) {
|
||||
return (rule.name === 'opacity');
|
||||
});
|
||||
}));
|
||||
|
||||
var rules = this.definitions.map(function(definition) {
|
||||
var rules = definitions.map(function(definition) {
|
||||
return definition.toXML(env, existing);
|
||||
});
|
||||
|
||||
var attrs_xml = '';
|
||||
|
||||
if (image_filters.length) {
|
||||
attrs_xml += ' image-filters="' + image_filters.map(function(f) {
|
||||
return f.eval(env).toXML(env, true, ' ', 'image-filter');
|
||||
}).join(' ') + '" ';
|
||||
attrs_xml += ' image-filters="' + _.chain(image_filters)
|
||||
// prevent identical filters from being duplicated in the style
|
||||
.uniq(function(i) { return i.id; }).map(function(f) {
|
||||
return f.ev(env).toXML(env, true, ',', 'image-filter');
|
||||
}).value().join(',') + '"';
|
||||
}
|
||||
|
||||
if (comp_op.length) {
|
||||
attrs_xml += ' comp-op="' + comp_op[0].value.eval(env).toString() + '" ';
|
||||
if (image_filters_inflate.length) {
|
||||
attrs_xml += ' image-filters-inflate="' + image_filters_inflate[0].value.ev(env).toString() + '"';
|
||||
}
|
||||
|
||||
if (opacity.length) {
|
||||
attrs_xml += ' opacity="' + opacity[0].value.eval(env).toString() + '" ';
|
||||
if (direct_image_filters.length) {
|
||||
attrs_xml += ' direct-image-filters="' + _.chain(direct_image_filters)
|
||||
// prevent identical filters from being duplicated in the style
|
||||
.uniq(function(i) { return i.id; }).map(function(f) {
|
||||
return f.ev(env).toXML(env, true, ',', 'direct-image-filter');
|
||||
}).value().join(',') + '"';
|
||||
}
|
||||
|
||||
return '<Style name="' + this.name + '" filter-mode="first" ' + attrs_xml + '>\n' + rules.join('') + '</Style>';
|
||||
if (comp_op.length && comp_op[0].value.ev(env).value != 'src-over') {
|
||||
attrs_xml += ' comp-op="' + comp_op[0].value.ev(env).toString() + '"';
|
||||
}
|
||||
|
||||
if (opacity.length && opacity[0].value.ev(env).value != 1) {
|
||||
attrs_xml += ' opacity="' + opacity[0].value.ev(env).toString() + '"';
|
||||
}
|
||||
var rule_string = rules.join('');
|
||||
if (!attrs_xml && !rule_string) return '';
|
||||
return '<Style name="' + name + '" filter-mode="first"' + attrs_xml + '>\n' + rule_string + '</Style>';
|
||||
};
|
||||
|
||||
})(require('../tree'));
|
||||
|
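// Illustrative result of tree.StyleXML above (names and values are hypothetical): a
// style named "world" with opacity 0.5 and one rule serializes roughly as
//   <Style name="world" filter-mode="first" opacity="0.5">\n<Rule>...</Rule></Style>
// while a default comp-op ('src-over'), a default opacity (1), and styles with neither
// attributes nor rules are now omitted entirely.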
@ -3,14 +3,15 @@
tree.URL = function URL(val, paths) {
this.value = val;
this.paths = paths;
this.is = 'uri';
};

tree.URL.prototype = {
is: 'uri',
toString: function() {
return this.value.toString();
},
eval: function(ctx) {
return new tree.URL(this.value.eval(ctx), this.paths);
ev: function(ctx) {
return new tree.URL(this.value.ev(ctx), this.paths);
}
};

@ -2,15 +2,16 @@

tree.Value = function Value(value) {
this.value = value;
this.is = 'value';
};

tree.Value.prototype = {
eval: function(env) {
is: 'value',
ev: function(env) {
if (this.value.length === 1) {
return this.value[0].eval(env);
return this.value[0].ev(env);
} else {
return new tree.Value(this.value.map(function(v) {
return v.eval(env);
return v.ev(env);
}));
}
},
@ -5,20 +5,24 @@ tree.Variable = function Variable(name, index, filename) {
this.index = index;
this.filename = filename;
};

tree.Variable.prototype = {
eval: function(env) {
is: 'variable',
toString: function() {
return this.name;
},
ev: function(env) {
var variable,
v,
that = this,
name = this.name;

if (this._css) return this._css;

var thisframe = env.frames.filter(function(f) {
return f.name == that.name;
});
return f.name == this.name;
}.bind(this));
if (thisframe.length) {
return thisframe[0].value.eval(env);
return thisframe[0].value.ev(env);
} else {
env.error({
message: 'variable ' + this.name + ' is undefined',
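// Illustration of the frame lookup above (hypothetical stylesheet values): given
// `@opacity: 0.5;` somewhere in scope, ev() finds the frame whose name matches and
// returns the evaluated 0.5; with no matching frame it falls through to env.error(),
// producing messages like the `variable @something is undefined` fixtures below.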
@ -4,22 +4,34 @@ var tree = require('../tree');
|
||||
// and stores them as bit-sequences so that they can be combined,
|
||||
// inverted, and compared quickly.
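// For example, with maxZoom 22 a selector like [zoom>=10] evaluates to a mask with
// bits 10..22 set and [zoom<=12] to a mask with bits 0..12 set, so combining the two
// is a single bitwise AND that leaves bits 10..12.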
|
||||
tree.Zoom = function(op, value, index) {
|
||||
value = parseInt(value, 10);
|
||||
if (value > tree.Zoom.maxZoom || value < 0) {
|
||||
throw {
|
||||
message: 'Only zoom levels between 0 and ' +
|
||||
tree.Zoom.maxZoom + ' supported.',
|
||||
index: index
|
||||
};
|
||||
}
|
||||
this.op = op;
|
||||
this.value = value;
|
||||
this.index = index;
|
||||
};
|
||||
|
||||
tree.Zoom.prototype.setZoom = function(zoom) {
|
||||
this.zoom = zoom;
|
||||
return this;
|
||||
};
|
||||
|
||||
tree.Zoom.prototype.ev = function(env) {
|
||||
var start = 0,
|
||||
end = Infinity,
|
||||
value = parseInt(this.value.ev(env).toString(), 10),
|
||||
zoom = 0;
|
||||
|
||||
switch (op) {
|
||||
if (value > tree.Zoom.maxZoom || value < 0) {
|
||||
env.error({
|
||||
message: 'Only zoom levels between 0 and ' +
|
||||
tree.Zoom.maxZoom + ' supported.',
|
||||
index: this.index
|
||||
});
|
||||
}
|
||||
|
||||
switch (this.op) {
|
||||
case '=':
|
||||
return 1 << value;
|
||||
this.zoom = 1 << value;
|
||||
return this;
|
||||
case '>':
|
||||
start = value + 1;
|
||||
break;
|
||||
@ -38,7 +50,12 @@ tree.Zoom = function(op, value, index) {
|
||||
zoom |= (1 << i);
|
||||
}
|
||||
}
|
||||
return zoom;
|
||||
this.zoom = zoom;
|
||||
return this;
|
||||
};
|
||||
|
||||
tree.Zoom.prototype.toString = function() {
|
||||
return this.zoom;
|
||||
};
|
||||
|
||||
// Covers all zoomlevels from 0 to 22
|
||||
@ -74,12 +91,12 @@ tree.Zoom.ranges = {
|
||||
};
|
||||
|
||||
// Only works for single range zooms. `[XXX....XXXXX.........]` is invalid.
|
||||
tree.Zoom.toXML = function(zoom) {
|
||||
tree.Zoom.prototype.toXML = function() {
|
||||
var conditions = [];
|
||||
if (zoom != tree.Zoom.all) {
|
||||
if (this.zoom != tree.Zoom.all) {
|
||||
var start = null, end = null;
|
||||
for (var i = 0; i <= tree.Zoom.maxZoom; i++) {
|
||||
if (zoom & (1 << i)) {
|
||||
if (this.zoom & (1 << i)) {
|
||||
if (start === null) start = i;
|
||||
end = i;
|
||||
}
|
||||
@ -92,11 +109,10 @@ tree.Zoom.toXML = function(zoom) {
|
||||
return conditions;
|
||||
};
|
||||
|
||||
|
||||
tree.Zoom.toString = function(zoom) {
|
||||
tree.Zoom.prototype.toString = function() {
|
||||
var str = '';
|
||||
for (var i = 0; i <= tree.Zoom.maxZoom; i++) {
|
||||
str += (zoom & (1 << i)) ? 'X' : '.';
|
||||
str += (this.zoom & (1 << i)) ? 'X' : '.';
|
||||
}
|
||||
return str;
|
||||
};
|
||||
|
1
makefile
1
makefile
@ -3,6 +3,7 @@
|
||||
#
|
||||
|
||||
expresso = ./node_modules/.bin/mocha
|
||||
<<<<<<< HEAD
|
||||
docco = ./node_modules/.bin/docco
|
||||
uglify = ./node_modules/.bin/uglify
|
||||
|
||||
|
24
man/carto.1
Normal file
24
man/carto.1
Normal file
@ -0,0 +1,24 @@
.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.40.12.
.TH CARTO "1" "March 2013" "carto 0.9.4" "User Commands"
.SH NAME
carto \- Carto map stylesheet compiler
.SH SYNOPSIS
.B carto [OPTION]
\fI<source MML file>\fR
.SH DESCRIPTION
Carto is a stylesheet renderer for Mapnik. It's an evolution of
the Cascadenik idea and language, with an emphasis on speed and
flexibility.
.SH OPTIONS
.TP
\fB\-v\fR \fB\-\-version\fR
Parse JSON map manifest
.TP
\fB\-b\fR \fB\-\-benchmark\fR
Outputs total compile time
.TP
\fB\-n\fR \fB\-\-nosymlink\fR
Use absolute paths instead of symlinking files
.SH REPORTING BUGS
Please report bugs on the GitHub issue tracker:
<\fBhttps://github.com/mapbox/carto/issues\fR>
30
package.json
30
package.json
@ -1,14 +1,14 @@
|
||||
{
|
||||
"name": "carto",
|
||||
"version": "0.8.1",
|
||||
"version": "0.11.0",
|
||||
"description": "Mapnik Stylesheet Compiler",
|
||||
"url": "https://github.com/mapbox/carto",
|
||||
"repositories": [{
|
||||
"type": "git",
|
||||
"url": "http://github.com/mapbox/carto.git"
|
||||
}],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "http://github.com/mapbox/carto.git"
|
||||
},
|
||||
"author": {
|
||||
"name": "MapBox",
|
||||
"name": "Mapbox",
|
||||
"url": "http://mapbox.com/",
|
||||
"email": "info@mapbox.com"
|
||||
},
|
||||
@ -24,29 +24,31 @@
|
||||
"Alexis Sellier <self@cloudhead.net>"
|
||||
],
|
||||
"licenses": [{
|
||||
"type": "Apache"
|
||||
"type": "Apache"
|
||||
}],
|
||||
"bin": {
|
||||
"carto": "./bin/carto",
|
||||
"mml2json.js": "./bin/mml2json.js"
|
||||
},
|
||||
"man": "./man/carto.1",
|
||||
"main": "./lib/carto/index",
|
||||
"engines": {
|
||||
"node": ">=0.4.x"
|
||||
},
|
||||
"dependencies": {
|
||||
"underscore": "~1.3.3",
|
||||
"mapnik-reference": "~3.1.0",
|
||||
"xml2js": "~0.1.13"
|
||||
"underscore": "~1.6.0",
|
||||
"mapnik-reference": "5.0.9",
|
||||
"xml2js": "~0.4.2",
|
||||
"optimist": "~0.6.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"mocha": "",
|
||||
"mocha": "1.12.x",
|
||||
"jshint": "0.2.x",
|
||||
"docco": "0.3.x",
|
||||
"jshint": "",
|
||||
"sax": "0.1.x"
|
||||
},
|
||||
"scripts": {
|
||||
"pretest": "npm install --dev",
|
||||
"test": "mocha"
|
||||
"pretest": "npm install",
|
||||
"test": "mocha -R spec"
|
||||
}
|
||||
}
|
||||
|
37
test/bincarto.test.js
Normal file
37
test/bincarto.test.js
Normal file
@ -0,0 +1,37 @@
|
||||
var assert = require('assert');
|
||||
var exec = require('child_process').exec;
|
||||
var path = require('path');
|
||||
var util = require('util');
|
||||
var helper = require('./support/helper');
|
||||
var bin = path.resolve(path.join(__dirname, '..', 'bin', 'carto'));
|
||||
var fs = require('fs');
|
||||
|
||||
describe('bin/carto', function() {
|
||||
it('errors on no input', function(done) {
|
||||
exec(bin, function(err, stdout, stderr) {
|
||||
assert.equal(1, err.code);
|
||||
assert.equal("carto: no input files ('carto -h or --help' for help)\n", stdout);
|
||||
done();
|
||||
});
|
||||
});
|
||||
it('renders mml', function(done) {
|
||||
var file = path.join(__dirname, 'rendering', 'identity.mml');
|
||||
exec(util.format('%s %s', bin, file), function(err, stdout, stderr) {
|
||||
assert.ifError(err);
|
||||
helper.compareToXMLFile(helper.resultFile(file), stdout, done, [
|
||||
helper.removeAbsoluteImages,
|
||||
helper.removeAbsoluteDatasources
|
||||
]);
|
||||
});
|
||||
});
|
||||
it('renders mss', function(done) {
|
||||
var file = path.join(__dirname, 'rendering-mss', 'empty_name.mss');
|
||||
exec(util.format('%s %s', bin, file), function(err, stdout, stderr) {
|
||||
assert.ifError(err);
|
||||
var expected = file.replace(path.extname(file),'')+'.xml';
|
||||
var expected_data = fs.readFileSync(expected, 'utf8');
|
||||
assert.equal(stdout,expected_data + '\n');
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
@ -6,9 +6,10 @@ var carto = require('../lib/carto');
|
||||
var tree = require('../lib/carto/tree');
|
||||
var helper = require('./support/helper');
|
||||
|
||||
describe('Error handling', function() {
|
||||
describe('Error handling mml+mss', function() {
|
||||
helper.files('errorhandling', 'mml', function(file) {
|
||||
it('should handle errors in ' + path.basename(file), function(done) {
|
||||
var basename = path.basename(file);
|
||||
it('should handle errors in ' + basename, function(done) {
|
||||
var completed = false;
|
||||
var renderResult;
|
||||
var mml = helper.mml(file);
|
||||
@ -18,26 +19,52 @@ helper.files('errorhandling', 'mml', function(file) {
|
||||
data_dir: path.join(__dirname, '../data'),
|
||||
local_data_dir: path.join(__dirname, 'rendering'),
|
||||
filename: file
|
||||
}).render(mml, function (err) {
|
||||
var result = helper.resultFile(file);
|
||||
var output = err.message;
|
||||
// @TODO for some reason, fs.readFile includes an additional \n
|
||||
// at the end of read files. Determine why.
|
||||
fs.readFile(helper.resultFile(file), 'utf8', function(err, data) {
|
||||
if (!err) assert.deepEqual(output, data.substr(0, data.length - 1));
|
||||
});
|
||||
});
|
||||
}).render(mml);
|
||||
} catch(err) {
|
||||
if (err.message.indexOf('***') > -1) throw err;
|
||||
var result = helper.resultFile(file);
|
||||
var output = err.message;
|
||||
// @TODO for some reason, fs.readFile includes an additional \n
|
||||
// at the end of read files. Determine why.
|
||||
fs.readFile(helper.resultFile(file), 'utf8', function(err, data) {
|
||||
if (!err) assert.deepEqual(output, data.substr(0, data.length - 1));
|
||||
});
|
||||
// fs.writeFileSync(helper.resultFile(file), output);
|
||||
var data = fs.readFileSync(helper.resultFile(file), 'utf8');
|
||||
assert.deepEqual(output, data);
|
||||
done();
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Error handling mss', function() {
|
||||
helper.files('errorhandling', 'mss', function(file) {
|
||||
var basename = path.basename(file);
|
||||
if (basename == 'multi_stylesheets_a.mss') {
|
||||
return;
|
||||
}
|
||||
it('should handle errors in ' + basename, function(done) {
|
||||
var completed = false;
|
||||
var renderResult;
|
||||
var mss = helper.mss(file);
|
||||
try {
|
||||
new carto.Renderer({
|
||||
paths: [ path.dirname(file) ],
|
||||
data_dir: path.join(__dirname, '../data'),
|
||||
local_data_dir: path.join(__dirname, 'rendering'),
|
||||
// note: we use the basename here so that the expected error result
|
||||
// will match if the style was loaded from mml
|
||||
filename: basename
|
||||
}).renderMSS(mss);
|
||||
} catch(err) {
|
||||
if (err.message.indexOf('***') > -1) throw err;
|
||||
var result = helper.resultFile(file);
|
||||
var output = err.message;
|
||||
// @TODO for some reason, fs.readFile includes an additional \n
|
||||
// at the end of read files. Determine why.
|
||||
// fs.writeFileSync(helper.resultFile(file), output);
|
||||
var data = fs.readFileSync(helper.resultFile(file), 'utf8');
|
||||
assert.deepEqual(output, data);
|
||||
done();
|
||||
}
|
||||
|
||||
done();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
15
test/errorhandling/color_functions.mml
Normal file
15
test/errorhandling/color_functions.mml
Normal file
@ -0,0 +1,15 @@
|
||||
{
|
||||
"srs": "+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over",
|
||||
"Stylesheet": [
|
||||
"color_functions.mss"
|
||||
],
|
||||
"Layer": [{
|
||||
"id": "world",
|
||||
"name": "world",
|
||||
"srs": "+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over",
|
||||
"Datasource": {
|
||||
"file": "http://tilemill-data.s3.amazonaws.com/test_data/shape_demo.zip",
|
||||
"type": "shape"
|
||||
}
|
||||
}]
|
||||
}
|
4
test/errorhandling/color_functions.mss
Normal file
4
test/errorhandling/color_functions.mss
Normal file
@ -0,0 +1,4 @@
|
||||
@foo: 'bar';
|
||||
#world {
|
||||
polygon-fill: hsl(1, @foo, 3);
|
||||
}
|
1
test/errorhandling/color_functions.result
Normal file
1
test/errorhandling/color_functions.result
Normal file
@ -0,0 +1 @@
|
||||
color_functions.mss:3:31 incorrect arguments given to hsl()
|
15
test/errorhandling/contradiction.mml
Normal file
15
test/errorhandling/contradiction.mml
Normal file
@ -0,0 +1,15 @@
|
||||
{
|
||||
"srs": "+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over",
|
||||
"Stylesheet": [
|
||||
"contradiction.mss"
|
||||
],
|
||||
"Layer": [{
|
||||
"name": "world",
|
||||
"srs": "+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over",
|
||||
"Datasource": {
|
||||
"file": "http://tilemill-data.s3.amazonaws.com/district.geojson",
|
||||
"type": "ogr",
|
||||
"layer": "OGRGeoJSON"
|
||||
}
|
||||
}]
|
||||
}
|
3
test/errorhandling/contradiction.mss
Normal file
3
test/errorhandling/contradiction.mss
Normal file
@ -0,0 +1,3 @@
|
||||
#world[FeatureCla!=""][FeatureCla=""] {
|
||||
polygon-fill: #fff;
|
||||
}
|
1
test/errorhandling/contradiction.result
Normal file
1
test/errorhandling/contradiction.result
Normal file
@ -0,0 +1 @@
|
||||
contradiction.mss:1:37 [[FeatureCla]=] added to [FeatureCla]!= produces an invalid filter
|
3
test/errorhandling/contradiction_2.mss
Normal file
3
test/errorhandling/contradiction_2.mss
Normal file
@ -0,0 +1,3 @@
|
||||
#world[FeatureCla=""][FeatureCla!=""] {
|
||||
polygon-fill: #fff;
|
||||
}
|
1
test/errorhandling/contradiction_2.result
Normal file
1
test/errorhandling/contradiction_2.result
Normal file
@ -0,0 +1 @@
|
||||
contradiction_2.mss:1:37 [[FeatureCla]!=] added to [FeatureCla]= produces an invalid filter
|
15
test/errorhandling/function_args.mml
Normal file
15
test/errorhandling/function_args.mml
Normal file
@ -0,0 +1,15 @@
|
||||
{
|
||||
"srs": "+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over",
|
||||
"Stylesheet": [
|
||||
"function_args.mss"
|
||||
],
|
||||
"Layer": [{
|
||||
"id": "world",
|
||||
"name": "world",
|
||||
"srs": "+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over",
|
||||
"Datasource": {
|
||||
"file": "http://tilemill-data.s3.amazonaws.com/test_data/shape_demo.zip",
|
||||
"type": "shape"
|
||||
}
|
||||
}]
|
||||
}
|
4
test/errorhandling/function_args.mss
Normal file
4
test/errorhandling/function_args.mss
Normal file
@ -0,0 +1,4 @@
|
||||
#world {
|
||||
point-transform: scale(2, 2);
|
||||
image-filters: agg-stack-blu(2, 1);
|
||||
}
|
1
test/errorhandling/function_args.result
Normal file
1
test/errorhandling/function_args.result
Normal file
@ -0,0 +1 @@
|
||||
function_args.mss:3:38 unknown function agg-stack-blu(), did you mean agg-stack-blur(2)
|
3
test/errorhandling/invalid_color_in_fn.mss
Normal file
3
test/errorhandling/invalid_color_in_fn.mss
Normal file
@ -0,0 +1,3 @@
|
||||
#world {
|
||||
polygon-fill: spin(#f00f00f, 10);
|
||||
}
|
1
test/errorhandling/invalid_color_in_fn.result
Normal file
1
test/errorhandling/invalid_color_in_fn.result
Normal file
@ -0,0 +1 @@
|
||||
invalid_color_in_fn.mss:2:34 incorrect arguments given to spin()
|
@ -1 +1 @@
|
||||
invalid_property.mss:3:2 Unrecognized rule: polygonopacity
|
||||
invalid_property.mss:3:2 Unrecognized rule: polygonopacity. Did you mean polygon-opacity?
|
@ -1,3 +1,5 @@
|
||||
#world[zoom=5] {
|
||||
polygon-opacity: #f00;
|
||||
text-face-name: 2;
|
||||
line-rasterizer: 'full';
|
||||
text-name: 'foo';
|
||||
}
|
||||
|
@ -1 +1,2 @@
|
||||
invalid_value.mss:2:2 Invalid value for polygon-opacity, a valid float is expected. #ff0000 was given.
|
||||
invalid_value.mss:2:2 Invalid value for text-face-name, the type font is expected. 2 (of type float) was given.
|
||||
invalid_value.mss:3:2 Invalid value for line-rasterizer, the type keyword (options: full, fast) is expected. full (of type string) was given.
|
1
test/errorhandling/issue119.result
Normal file
1
test/errorhandling/issue119.result
Normal file
@ -0,0 +1 @@
|
||||
issue119.mss:2:2 Map properties are not permitted in other rules
|
@ -1 +1 @@
|
||||
issue123.mss:3:31 incorrect number of arguments for darken(). 2 expected.
|
||||
issue123.mss:3:31 incorrect number of arguments for darken(). 2 expected.
|
1
test/errorhandling/issue124.result
Normal file
1
test/errorhandling/issue124.result
Normal file
@ -0,0 +1 @@
|
||||
issue124.mss:6:0 missing closing `}`
|
8
test/errorhandling/issue297.mml
Normal file
8
test/errorhandling/issue297.mml
Normal file
@ -0,0 +1,8 @@
|
||||
{
|
||||
"Stylesheet": [
|
||||
"issue297.mss"
|
||||
],
|
||||
"Layer": [{
|
||||
"name": "t"
|
||||
}]
|
||||
}
|
4
test/errorhandling/issue297.mss
Normal file
4
test/errorhandling/issue297.mss
Normal file
@ -0,0 +1,4 @@
|
||||
#t {
|
||||
text-name: invalid;
|
||||
text-face-name: "Dejagnu";
|
||||
}
|
1
test/errorhandling/issue297.result
Normal file
1
test/errorhandling/issue297.result
Normal file
@ -0,0 +1 @@
|
||||
issue297.mss:2:2 Invalid value for text-name, the type expression is expected. invalid (of type keyword) was given.
|
3
test/errorhandling/issue_204_a.mss
Normal file
3
test/errorhandling/issue_204_a.mss
Normal file
@ -0,0 +1,3 @@
|
||||
#countries {
|
||||
polygon-fill: green;
|
||||
}}
|
1
test/errorhandling/issue_204_a.result
Normal file
1
test/errorhandling/issue_204_a.result
Normal file
@ -0,0 +1 @@
|
||||
issue_204_a.mss:3:1 missing opening `{`
|
3
test/errorhandling/issue_204_b.mss
Normal file
3
test/errorhandling/issue_204_b.mss
Normal file
@ -0,0 +1,3 @@
|
||||
#countries {
|
||||
polygon-fill: green;
|
||||
}}
|
1
test/errorhandling/issue_204_b.result
Normal file
1
test/errorhandling/issue_204_b.result
Normal file
@ -0,0 +1 @@
|
||||
issue_204_b.mss:3:3 missing opening `{`
|
3
test/errorhandling/issue_204_c.mss
Normal file
3
test/errorhandling/issue_204_c.mss
Normal file
@ -0,0 +1,3 @@
|
||||
#countries {
|
||||
polygon-fill: green;
|
||||
}}
|
1
test/errorhandling/issue_204_c.result
Normal file
1
test/errorhandling/issue_204_c.result
Normal file
@ -0,0 +1 @@
|
||||
issue_204_c.mss:4:0 missing opening `{`
|
5
test/errorhandling/issue_218.mss
Normal file
5
test/errorhandling/issue_218.mss
Normal file
@ -0,0 +1,5 @@
|
||||
#country-label {
|
||||
[one = 2]
|
||||
marker-width:1;
|
||||
}
|
||||
}
|
1
test/errorhandling/issue_218.result
Normal file
1
test/errorhandling/issue_218.result
Normal file
@ -0,0 +1 @@
|
||||
issue_218.mss:5:2 missing opening `{`
|
14
test/errorhandling/mapnik_keyword.mml
Normal file
14
test/errorhandling/mapnik_keyword.mml
Normal file
@ -0,0 +1,14 @@
|
||||
{
|
||||
"srs": "+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over",
|
||||
"Stylesheet": [
|
||||
"mapnik_keyword.mss"
|
||||
],
|
||||
"Layer": [{
|
||||
"name": "world",
|
||||
"srs": "+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over",
|
||||
"Datasource": {
|
||||
"file": "http://tilemill-data.s3.amazonaws.com/test_data/shape_demo.zip",
|
||||
"type": "shape"
|
||||
}
|
||||
}]
|
||||
}
|
3
test/errorhandling/mapnik_keyword.mss
Normal file
3
test/errorhandling/mapnik_keyword.mss
Normal file
@ -0,0 +1,3 @@
|
||||
#world[[FOO]=nul] {
|
||||
line-width:2;
|
||||
}
|
1
test/errorhandling/mapnik_keyword.result
Normal file
1
test/errorhandling/mapnik_keyword.result
Normal file
@ -0,0 +1 @@
|
||||
mapnik_keyword.mss:1:6 nul is not a valid keyword in a filter expression
|
14
test/errorhandling/missing_close.mml
Normal file
14
test/errorhandling/missing_close.mml
Normal file
@ -0,0 +1,14 @@
|
||||
{
|
||||
"srs": "+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over",
|
||||
"Stylesheet": [
|
||||
"missing_close.mss"
|
||||
],
|
||||
"Layer": [{
|
||||
"name": "world",
|
||||
"srs": "+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over",
|
||||
"Datasource": {
|
||||
"file": "http://tilemill-data.s3.amazonaws.com/test_data/shape_demo.zip",
|
||||
"type": "shape"
|
||||
}
|
||||
}]
|
||||
}
|
4
test/errorhandling/missing_close.mss
Normal file
4
test/errorhandling/missing_close.mss
Normal file
@ -0,0 +1,4 @@
|
||||
#world[natural="water"
|
||||
{
|
||||
polygon-fill: blue;
|
||||
}
|
1
test/errorhandling/missing_close.result
Normal file
1
test/errorhandling/missing_close.result
Normal file
@ -0,0 +1 @@
|
||||
missing_close.mss:1:5 Missing closing ] of filter.
|
1
test/errorhandling/multi_stylesheets.result
Normal file
1
test/errorhandling/multi_stylesheets.result
Normal file
@ -0,0 +1 @@
|
||||
multi_stylesheets_b.mss:2:2 Unrecognized rule: polygonopacity. Did you mean polygon-opacity?
|
1
test/errorhandling/multi_stylesheets_b.result
Normal file
1
test/errorhandling/multi_stylesheets_b.result
Normal file
@ -0,0 +1 @@
|
||||
multi_stylesheets_b.mss:2:2 Unrecognized rule: polygonopacity. Did you mean polygon-opacity?
|
1
test/errorhandling/nopound.result
Normal file
1
test/errorhandling/nopound.result
Normal file
@ -0,0 +1 @@
|
||||
nopound.mss:1:0 Invalid code: world {
|
1
test/errorhandling/notenoughargs.result
Normal file
1
test/errorhandling/notenoughargs.result
Normal file
@ -0,0 +1 @@
|
||||
notenoughargs.mss:3:31 incorrect number of arguments for darken(). 2 expected.
|
@ -1,3 +1,3 @@
|
||||
undefined_variable.mss:2:16 variable @something is undefined
|
||||
undefined_variable.mss:3:14 variable @something is undefined
|
||||
undefined_variable.mss:4:22 variable @something is undefined
|
||||
undefined_variable.mss:4:22 variable @something is undefined
|
15
test/errorhandling/zoom_as_var.mml
Normal file
15
test/errorhandling/zoom_as_var.mml
Normal file
@ -0,0 +1,15 @@
|
||||
{
|
||||
"srs": "+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over",
|
||||
"Stylesheet": [
|
||||
"zoom_as_var.mss"
|
||||
],
|
||||
"Layer": [{
|
||||
"id": "world",
|
||||
"name": "world",
|
||||
"srs": "+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over",
|
||||
"Datasource": {
|
||||
"file": "http://tilemill-data.s3.amazonaws.com/test_data/shape_demo.zip",
|
||||
"type": "shape"
|
||||
}
|
||||
}]
|
||||
}
|
3
test/errorhandling/zoom_as_var.mss
Normal file
3
test/errorhandling/zoom_as_var.mss
Normal file
@ -0,0 +1,3 @@
|
||||
#world[zoom=5] {
|
||||
polygon-opacity: zoom / 0.5;
|
||||
}
|
1
test/errorhandling/zoom_as_var.result
Normal file
1
test/errorhandling/zoom_as_var.result
Normal file
@ -0,0 +1 @@
|
||||
zoom_as_var.mss:2:2 Cannot do math with type keyword.
|
15
test/errorhandling/zoommax.mml
Normal file
15
test/errorhandling/zoommax.mml
Normal file
@ -0,0 +1,15 @@
|
||||
{
|
||||
"srs": "+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over",
|
||||
"Stylesheet": [
|
||||
"zoommax.mss"
|
||||
],
|
||||
"Layer": [{
|
||||
"id": "world",
|
||||
"name": "world",
|
||||
"srs": "+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over",
|
||||
"Datasource": {
|
||||
"file": "http://tilemill-data.s3.amazonaws.com/test_data/shape_demo.zip",
|
||||
"type": "shape"
|
||||
}
|
||||
}]
|
||||
}
|
3
test/errorhandling/zoommax.mss
Normal file
3
test/errorhandling/zoommax.mss
Normal file
@ -0,0 +1,3 @@
|
||||
#world[zoom > 40] {
|
||||
polygon-fill: #fff;
|
||||
}
|
1
test/errorhandling/zoommax.result
Normal file
1
test/errorhandling/zoommax.result
Normal file
@ -0,0 +1 @@
|
||||
zoommax.mss:1:6 Only zoom levels between 0 and 22 supported.
|
18
test/field.test.js
Normal file
18
test/field.test.js
Normal file
@ -0,0 +1,18 @@
|
||||
var assert = require('assert');
|
||||
var tree = require('../lib/carto/tree.js');
|
||||
require('../lib/carto/tree/field');
|
||||
|
||||
describe('Field', function() {
|
||||
describe('basic functionality', function() {
|
||||
it('should be constructed', function() {
|
||||
var f = new tree.Field("foo");
|
||||
assert.ok(f);
|
||||
assert.equal(f.is, 'field');
|
||||
});
|
||||
it('should produce xml-friendly output', function() {
|
||||
var f = new tree.Field("bar");
|
||||
assert.ok(f);
|
||||
assert.equal(f.toString(), "[bar]");
|
||||
});
|
||||
});
|
||||
});
|
24
test/filter.test.js
Normal file
24
test/filter.test.js
Normal file
@ -0,0 +1,24 @@
|
||||
var assert = require('assert');
|
||||
var tree = require('../lib/carto/tree.js');
|
||||
require('../lib/carto/tree/field');
|
||||
require('../lib/carto/tree/dimension');
|
||||
require('../lib/carto/tree/filter');
|
||||
|
||||
describe('Field', function() {
|
||||
describe('basic functionality', function() {
|
||||
it('should be constructed', function() {
|
||||
var f = new tree.Filter(new tree.Field('foo'), '=', new tree.Dimension(1));
|
||||
assert.ok(f);
|
||||
});
|
||||
it('can be evaluated', function() {
|
||||
var f = new tree.Filter(new tree.Field('foo'), '=', new tree.Dimension(1));
|
||||
f.ev({});
|
||||
assert.ok(f);
|
||||
});
|
||||
it('yields xml', function() {
|
||||
var f = new tree.Filter(new tree.Field('foo'), '=', new tree.Dimension(1));
|
||||
f.ev({});
|
||||
assert.equal(f.toXML({}), '[foo] = 1');
|
||||
});
|
||||
});
|
||||
});
|
@ -3,229 +3,278 @@ var tree = require('../lib/carto/tree.js');
require('../lib/carto/tree/filterset');

describe('Filtersets', function() {
it('should add filters correctly', function() {
var f = new tree.Filterset;
assert.ok(true === f.addable({ key: 'TOTAL', op: '=', val: '11' }), '=11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '!=', val: '90' }), '!=90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>', val: 9 }), '>9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>=', val: 9 }), '>=9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<', val: 90 }), '<90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<=', val: 90 }), '<=90');
describe('basic functionality', function() {
it('should be constructed', function() {
var f = new tree.Filterset();
assert.ok(f);
assert.ok(f.filters);
});
it('yields xml', function() {
var f = new tree.Filterset();
assert.equal(f.toXML({}), '');
});
it('yields a clone', function() {
var f = new tree.Filterset();
assert.ok(f.clone() instanceof tree.Filterset);
});
});

var f = new tree.Filterset;
f.add({ key: 'TOTAL', op: '=', val: '11' });
assert.ok(null === f.addable({ key: 'TOTAL', op: '=', val: '11' }), '=11 =11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '!=', val: '90' }), '=11 !=90');
assert.ok(null === f.addable({ key: 'TOTAL', op: '>', val: 9 }), '=11 >9');
assert.ok(null === f.addable({ key: 'TOTAL', op: '>=', val: 9 }), '=11 >=9');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<', val: 90 }), '=11 <90');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<=', val: 90 }), '=11 <=90');
assert.ok(false === f.addable({ key: 'TOTAL', op: '=', val: '90' }), '=11 =90');
assert.ok(false === f.addable({ key: 'TOTAL', op: '!=', val: '11' }), '=11 !=11');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>', val: 90 }), '=11 >90');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>=', val: 90 }), '=11 >=90');
assert.ok(false === f.addable({ key: 'TOTAL', op: '<', val: 9 }), '=11 <9');
assert.ok(false === f.addable({ key: 'TOTAL', op: '<=', val: 9 }), '=11 <=9');
describe('should add filters correctly', function() {
it('adding to empty set', function() {
var f = new tree.Filterset();
assert.ok(true === f.addable({ key: 'TOTAL', op: '=', val: '11' }), '=11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '!=', val: '90' }), '!=90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>', val: 9 }), '>9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>=', val: 9 }), '>=9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<', val: 90 }), '<90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<=', val: 90 }), '<=90');
});
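// Editorial note, not part of the diff: the assertions in this file rely on
// Filterset#addable() returning one of three values (an inference from these
// tests, not from carto documentation): true means the candidate filter
// narrows the set and can be added; null means it is already implied by the
// existing filters (redundant, nothing to add); false means it contradicts
// them, so the combination would match nothing. A minimal sketch of that
// reading, using only calls that appear in the tests:
var sketch = new tree.Filterset();
sketch.add({ key: 'TOTAL', op: '=', val: '11' });
sketch.addable({ key: 'TOTAL', op: '<', val: 90 });   // null: TOTAL=11 already implies TOTAL<90
sketch.addable({ key: 'TOTAL', op: '=', val: '90' }); // false: TOTAL cannot equal both 11 and 90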

var f = new tree.Filterset;
f.add({ key: 'TOTAL', op: '!=', val: '11' });
assert.ok(false === f.addable({ key: 'TOTAL', op: '=', val: '11' }), '!=11 =11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '!=', val: '11' }), '!=11 !=11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '!=', val: '9' }), '!=11 !=9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '!=', val: '90' }), '!=11 !=90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>', val: 9 }), '!=11 >9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>=', val: 9 }), '!=11 >=9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>', val: 90 }), '!=11 >90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>=', val: 90 }), '!=11 >=90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<', val: 9 }), '!=11 <9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<=', val: 9 }), '!=11 <=9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<', val: 90 }), '!=11 <90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<=', val: 90 }), '!=11 <=90');
it('adding to set with one and same key', function() {
var f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '=', val: '11' });
assert.ok(null === f.addable({ key: 'TOTAL', op: '=', val: '11' }), '=11 =11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '!=', val: '90' }), '=11 !=90');
assert.ok(null === f.addable({ key: 'TOTAL', op: '>', val: '9' }), '=11 >9');
assert.ok(null === f.addable({ key: 'TOTAL', op: '>=', val: '9' }), '=11 >=9');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<', val: '90' }), '=11 <90');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<=', val: '90' }), '=11 <=90');
assert.ok(false === f.addable({ key: 'TOTAL', op: '=', val: '90' }), '=11 =90');
assert.ok(false === f.addable({ key: 'TOTAL', op: '!=', val: '11' }), '=11 !=11');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>', val: 90 }), '=11 >90');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>=', val: 90 }), '=11 >=90');
assert.ok(false === f.addable({ key: 'TOTAL', op: '<', val: 9 }), '=11 <9');
assert.ok(false === f.addable({ key: 'TOTAL', op: '<=', val: 9 }), '=11 <=9');
});

var f = new tree.Filterset;
f.add({ key: 'TOTAL', op: '>', val: 11 });
assert.ok(false === f.addable({ key: 'TOTAL', op: '=', val: '11' }), '>11 =11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '!=', val: '11' }), '>11 !=11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '>', val: 11 }), '>11 >11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '>=', val: 11 }), '>11 >=11');
assert.ok(false === f.addable({ key: 'TOTAL', op: '<', val: 11 }), '>11 <11');
assert.ok(false === f.addable({ key: 'TOTAL', op: '<=', val: 11 }), '>11 <=11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '=', val: '90' }), '>11 =90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '!=', val: '90' }), '>11 !=90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>', val: 90 }), '>11 >90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>=', val: 90 }), '>11 >=90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<', val: 90 }), '>11 <90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<=', val: 90 }), '>11 <=90');
assert.ok(false === f.addable({ key: 'TOTAL', op: '=', val: '9' }), '>11 =9');
assert.ok(null === f.addable({ key: 'TOTAL', op: '!=', val: '9' }), '>11 !=9');
assert.ok(null === f.addable({ key: 'TOTAL', op: '>', val: 9 }), '>11 >9');
assert.ok(null === f.addable({ key: 'TOTAL', op: '>=', val: 9 }), '>11 >=9');
assert.ok(false === f.addable({ key: 'TOTAL', op: '<', val: 9 }), '>11 <9');
assert.ok(false === f.addable({ key: 'TOTAL', op: '<=', val: 9 }), '>11 <=9');
it('adding to set with one and same key and !=', function() {
var f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '!=', val: '11' });
assert.ok(false === f.addable({ key: 'TOTAL', op: '=', val: '11' }), '!=11 =11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '!=', val: '11' }), '!=11 !=11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '!=', val: '9' }), '!=11 !=9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '!=', val: '90' }), '!=11 !=90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>', val: 9 }), '!=11 >9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>=', val: 9 }), '!=11 >=9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>', val: 90 }), '!=11 >90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>=', val: 90 }), '!=11 >=90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<', val: 9 }), '!=11 <9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<=', val: 9 }), '!=11 <=9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<', val: 90 }), '!=11 <90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<=', val: 90 }), '!=11 <=90');
});

var f = new tree.Filterset;
f.add({ key: 'TOTAL', op: '>=', val: 11 });
assert.ok(true === f.addable({ key: 'TOTAL', op: '=', val: '11' }), '>=11 =11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '!=', val: '11' }), '>=11 !=11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>', val: 11 }), '>=11 >11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '>=', val: 11 }), '>=11 >=11');
assert.ok(false === f.addable({ key: 'TOTAL', op: '<', val: 11 }), '>=11 <11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<=', val: 11 }), '>=11 <=11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '=', val: '90' }), '>=11 =90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '!=', val: '90' }), '>=11 !=90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>', val: 90 }), '>=11 >90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>=', val: 90 }), '>=11 >=90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<', val: 90 }), '>=11 <90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<=', val: 90 }), '>=11 <=90');
assert.ok(false === f.addable({ key: 'TOTAL', op: '=', val: '9' }), '>=11 =9');
assert.ok(null === f.addable({ key: 'TOTAL', op: '!=', val: '9' }), '>=11 !=9');
assert.ok(null === f.addable({ key: 'TOTAL', op: '>', val: 9 }), '>=11 >9');
assert.ok(null === f.addable({ key: 'TOTAL', op: '>=', val: 9 }), '>=11 >=9');
assert.ok(false === f.addable({ key: 'TOTAL', op: '<', val: 9 }), '>=11 <9');
assert.ok(false === f.addable({ key: 'TOTAL', op: '<=', val: 9 }), '>=11 <=9');
it('adding to set with one and same key and >', function() {
var f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '>', val: 11 });
assert.ok(false === f.addable({ key: 'TOTAL', op: '=', val: '11' }), '>11 =11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '!=', val: '11' }), '>11 !=11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '>', val: 11 }), '>11 >11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '>=', val: 11 }), '>11 >=11');
assert.ok(false === f.addable({ key: 'TOTAL', op: '<', val: 11 }), '>11 <11');
assert.ok(false === f.addable({ key: 'TOTAL', op: '<=', val: 11 }), '>11 <=11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '=', val: '90' }), '>11 =90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '!=', val: '90' }), '>11 !=90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>', val: 90 }), '>11 >90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>=', val: 90 }), '>11 >=90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<', val: 90 }), '>11 <90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<=', val: 90 }), '>11 <=90');
assert.ok(false === f.addable({ key: 'TOTAL', op: '=', val: '9' }), '>11 =9');
assert.ok(null === f.addable({ key: 'TOTAL', op: '!=', val: '9' }), '>11 !=9');
assert.ok(null === f.addable({ key: 'TOTAL', op: '>', val: 9 }), '>11 >9');
assert.ok(null === f.addable({ key: 'TOTAL', op: '>=', val: 9 }), '>11 >=9');
assert.ok(false === f.addable({ key: 'TOTAL', op: '<', val: 9 }), '>11 <9');
assert.ok(false === f.addable({ key: 'TOTAL', op: '<=', val: 9 }), '>11 <=9');
});

var f = new tree.Filterset;
f.add({ key: 'TOTAL', op: '<', val: 11 });
assert.ok(false === f.addable({ key: 'TOTAL', op: '=', val: '11' }), '<11 =11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '!=', val: '11' }), '<11 !=11');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>', val: 11 }), '<11 >11');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>=', val: 11 }), '<11 >=11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<', val: 11 }), '<11 <11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<=', val: 11 }), '<11 <=11');
assert.ok(false === f.addable({ key: 'TOTAL', op: '=', val: '90' }), '<11 =90');
assert.ok(null === f.addable({ key: 'TOTAL', op: '!=', val: '90' }), '<11 !=90');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>', val: 90 }), '<11 >90');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>=', val: 90 }), '<11 >=90');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<', val: 90 }), '<11 <90');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<=', val: 90 }), '<11 <=90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '=', val: '9' }), '<11 =9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '!=', val: '9' }), '<11 !=9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>', val: 9 }), '<11 >9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>=', val: 9 }), '<11 >=9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<', val: 9 }), '<11 <9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<=', val: 9 }), '<11 <=9');
it('adding to set with one and same key and >=', function() {
var f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '>=', val: 11 });
assert.ok(true === f.addable({ key: 'TOTAL', op: '=', val: '11' }), '>=11 =11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '!=', val: '11' }), '>=11 !=11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>', val: 11 }), '>=11 >11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '>=', val: 11 }), '>=11 >=11');
assert.ok(false === f.addable({ key: 'TOTAL', op: '<', val: 11 }), '>=11 <11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<=', val: 11 }), '>=11 <=11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '=', val: '90' }), '>=11 =90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '!=', val: '90' }), '>=11 !=90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>', val: 90 }), '>=11 >90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>=', val: 90 }), '>=11 >=90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<', val: 90 }), '>=11 <90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<=', val: 90 }), '>=11 <=90');
assert.ok(false === f.addable({ key: 'TOTAL', op: '=', val: '9' }), '>=11 =9');
assert.ok(null === f.addable({ key: 'TOTAL', op: '!=', val: '9' }), '>=11 !=9');
assert.ok(null === f.addable({ key: 'TOTAL', op: '>', val: 9 }), '>=11 >9');
assert.ok(null === f.addable({ key: 'TOTAL', op: '>=', val: 9 }), '>=11 >=9');
assert.ok(false === f.addable({ key: 'TOTAL', op: '<', val: 9 }), '>=11 <9');
assert.ok(false === f.addable({ key: 'TOTAL', op: '<=', val: 9 }), '>=11 <=9');
});

var f = new tree.Filterset;
f.add({ key: 'TOTAL', op: '<=', val: 11 });
assert.ok(true === f.addable({ key: 'TOTAL', op: '=', val: '11' }), '<=11 =11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '!=', val: '11' }), '<=11 !=11');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>', val: 11 }), '<=11 >11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>=', val: 11 }), '<=11 >=11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<', val: 11 }), '<=11 <11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<=', val: 11 }), '<=11 <=11');
assert.ok(false === f.addable({ key: 'TOTAL', op: '=', val: '90' }), '<=11 =90');
assert.ok(null === f.addable({ key: 'TOTAL', op: '!=', val: '90' }), '<=11 !=90');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>', val: 90 }), '<=11 >90');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>=', val: 90 }), '<=11 >=90');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<', val: 90 }), '<=11 <90');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<=', val: 90 }), '<=11 <=90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '=', val: '9' }), '<=11 =9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '!=', val: '9' }), '<=11 !=9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>', val: 9 }), '<=11 >9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>=', val: 9 }), '<=11 >=9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<', val: 9 }), '<=11 <9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<=', val: 9 }), '<=11 <=9');
it('adding to set with one and same key and <', function() {
var f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '<', val: 11 });
assert.ok(false === f.addable({ key: 'TOTAL', op: '=', val: '11' }), '<11 =11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '!=', val: '11' }), '<11 !=11');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>', val: 11 }), '<11 >11');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>=', val: 11 }), '<11 >=11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<', val: 11 }), '<11 <11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<=', val: 11 }), '<11 <=11');
assert.ok(false === f.addable({ key: 'TOTAL', op: '=', val: '90' }), '<11 =90');
assert.ok(null === f.addable({ key: 'TOTAL', op: '!=', val: '90' }), '<11 !=90');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>', val: 90 }), '<11 >90');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>=', val: 90 }), '<11 >=90');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<', val: 90 }), '<11 <90');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<=', val: 90 }), '<11 <=90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '=', val: '9' }), '<11 =9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '!=', val: '9' }), '<11 !=9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>', val: 9 }), '<11 >9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>=', val: 9 }), '<11 >=9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<', val: 9 }), '<11 <9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<=', val: 9 }), '<11 <=9');
});

var f = new tree.Filterset;
f.add({ key: 'TOTAL', op: '<=', val: 11 });
f.add({ key: 'TOTAL', op: '>', val: 9 });
f.add({ key: 'TOTAL', op: '!=', val: '10' });
assert.ok(false === f.addable({ key: 'TOTAL', op: '=', val: '10' }), '<=11 >9 !=10 =10');
assert.ok(true === f.addable({ key: 'TOTAL', op: '=', val: '10.5' }), '<=11 >9 !=10 =10.5');
assert.ok(false === f.addable({ key: 'TOTAL', op: '=', val: '9' }), '<=11 >9 !=10 =9');
assert.ok(false === f.addable({ key: 'TOTAL', op: '=', val: '8' }), '<=11 >9 !=10 =8');
assert.ok(true === f.addable({ key: 'TOTAL', op: '=', val: '11' }), '<=11 >9 !=10 =11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '!=', val: '10' }), '<=11 >9 !=10 !=10');
assert.ok(true === f.addable({ key: 'TOTAL', op: '!=', val: '10.5' }), '<=11 >9 !=10 !=10.5');
assert.ok(null === f.addable({ key: 'TOTAL', op: '!=', val: '9' }), '<=11 >9 !=10 !=9');
assert.ok(null === f.addable({ key: 'TOTAL', op: '!=', val: '8' }), '<=11 >9 !=10 !=8');
assert.ok(true === f.addable({ key: 'TOTAL', op: '!=', val: '11' }), '<=11 >9 !=10 !=11');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>', val: 11 }), '<=11 >9 !=10 >11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>=', val: 11 }), '<=11 >9 !=10 >=11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<', val: 11 }), '<=11 >9 !=10 <11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<=', val: 11 }), '<=11 >9 !=10 <=11');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>', val: 90 }), '<=11 >9 !=10 >90');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>=', val: 90 }), '<=11 >9 !=10 >=90');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<', val: 90 }), '<=11 >9 !=10 <90');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<=', val: 90 }), '<=11 >9 !=10 <=90');
assert.ok(null === f.addable({ key: 'TOTAL', op: '>', val: 9 }), '<=11 >9 !=10 >9');
assert.ok(null === f.addable({ key: 'TOTAL', op: '>=', val: 9 }), '<=11 >9 !=10 >=9');
assert.ok(false === f.addable({ key: 'TOTAL', op: '<', val: 9 }), '<=11 >9 !=10 <9');
assert.ok(false === f.addable({ key: 'TOTAL', op: '<=', val: 9 }), '<=11 >9 !=10 <=9');
});


it('should add filtersets', function() {
var f = new tree.Filterset; f.add({ key: 'TOTAL', op: '=', val: '11' });
assert.deepEqual(f, { 'TOTAL=': { key: 'TOTAL', op: '=', val: '11' }});
var f = new tree.Filterset; f.add({ key: 'TOTAL', op: '!=', val: '4' });
assert.deepEqual(f, { 'TOTAL!=4': { key: 'TOTAL', op: '!=', val: '4' }});
var f = new tree.Filterset; f.add({ key: 'TOTAL', op: '>', val: '4' });
assert.deepEqual(f, { 'TOTAL>': { key: 'TOTAL', op: '>', val: '4' }});
var f = new tree.Filterset; f.add({ key: 'TOTAL', op: '>=', val: '4' });
assert.deepEqual(f, { 'TOTAL>=': { key: 'TOTAL', op: '>=', val: '4' }});
var f = new tree.Filterset; f.add({ key: 'TOTAL', op: '<', val: '4' });
assert.deepEqual(f, { 'TOTAL<': { key: 'TOTAL', op: '<', val: '4' }});
var f = new tree.Filterset; f.add({ key: 'TOTAL', op: '<=', val: '4' });
assert.deepEqual(f, { 'TOTAL<=': { key: 'TOTAL', op: '<=', val: '4' }});


var f = new tree.Filterset; f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '!=', val: '9' });
assert.deepEqual(f, { 'TOTAL!=9': { key: 'TOTAL', op: '!=', val: '9' }, 'TOTAL!=11': { key: 'TOTAL', op: '!=', val: '11' }});

var f = new tree.Filterset; f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '>', val: 9 });
assert.deepEqual(f, { 'TOTAL>': { key: 'TOTAL', op: '>', val: 9 }, 'TOTAL!=11': { key: 'TOTAL', op: '!=', val: '11' }});

var f = new tree.Filterset; f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '>', val: 11 });
assert.deepEqual(f, { 'TOTAL>': { key: 'TOTAL', op: '>', val: 11 }});

var f = new tree.Filterset; f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '>', val: 90 });
assert.deepEqual(f, { 'TOTAL>': { key: 'TOTAL', op: '>', val: 90 }});


var f = new tree.Filterset; f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '>=', val: 9 });
assert.deepEqual(f, { 'TOTAL>=': { key: 'TOTAL', op: '>=', val: 9 }, 'TOTAL!=11': { key: 'TOTAL', op: '!=', val: '11' }});

var f = new tree.Filterset; f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '>=', val: 11 });
assert.deepEqual(f, { 'TOTAL>': { key: 'TOTAL', op: '>', val: 11 }});

var f = new tree.Filterset; f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '>=', val: 90 });
assert.deepEqual(f, { 'TOTAL>=': { key: 'TOTAL', op: '>=', val: 90 }});


var f = new tree.Filterset; f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '<', val: 9 });
assert.deepEqual(f, { 'TOTAL<': { key: 'TOTAL', op: '<', val: 9 }});

var f = new tree.Filterset; f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '<', val: 11 });
assert.deepEqual(f, { 'TOTAL<': { key: 'TOTAL', op: '<', val: 11 }});

var f = new tree.Filterset; f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '<', val: 90 });
assert.deepEqual(f, { 'TOTAL<': { key: 'TOTAL', op: '<', val: 90 }, 'TOTAL!=11': { key: 'TOTAL', op: '!=', val: '11' }});


var f = new tree.Filterset; f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '<=', val: 9 });
assert.deepEqual(f, { 'TOTAL<=': { key: 'TOTAL', op: '<=', val: 9 }});

var f = new tree.Filterset; f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '<=', val: 11 });
assert.deepEqual(f, { 'TOTAL<': { key: 'TOTAL', op: '<', val: 11 }});

var f = new tree.Filterset; f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '<=', val: 90 });
assert.deepEqual(f, { 'TOTAL<=': { key: 'TOTAL', op: '<=', val: 90 }, 'TOTAL!=11': { key: 'TOTAL', op: '!=', val: '11' }});


// TODO: some more adding tests.
});
it('adding to set with one and same key and <=', function() {
var f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '<=', val: 11 });
assert.ok(true === f.addable({ key: 'TOTAL', op: '=', val: '11' }), '<=11 =11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '!=', val: '11' }), '<=11 !=11');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>', val: 11 }), '<=11 >11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>=', val: 11 }), '<=11 >=11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<', val: 11 }), '<=11 <11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<=', val: 11 }), '<=11 <=11');
assert.ok(false === f.addable({ key: 'TOTAL', op: '=', val: '90' }), '<=11 =90');
assert.ok(null === f.addable({ key: 'TOTAL', op: '!=', val: '90' }), '<=11 !=90');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>', val: 90 }), '<=11 >90');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>=', val: 90 }), '<=11 >=90');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<', val: 90 }), '<=11 <90');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<=', val: 90 }), '<=11 <=90');
assert.ok(true === f.addable({ key: 'TOTAL', op: '=', val: '9' }), '<=11 =9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '!=', val: '9' }), '<=11 !=9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>', val: 9 }), '<=11 >9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>=', val: 9 }), '<=11 >=9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<', val: 9 }), '<=11 <9');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<=', val: 9 }), '<=11 <=9');
});

it('adding to filterset with three filters', function() {
var f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '<=', val: 11 });
f.add({ key: 'TOTAL', op: '>', val: 9 });
f.add({ key: 'TOTAL', op: '!=', val: '10' });
assert.ok(false === f.addable({ key: 'TOTAL', op: '=', val: '10' }), '<=11 >9 !=10 =10');
assert.ok(true === f.addable({ key: 'TOTAL', op: '=', val: '10.5' }), '<=11 >9 !=10 =10.5');
assert.ok(false === f.addable({ key: 'TOTAL', op: '=', val: '9' }), '<=11 >9 !=10 =9');
assert.ok(false === f.addable({ key: 'TOTAL', op: '=', val: '8' }), '<=11 >9 !=10 =8');
assert.ok(true === f.addable({ key: 'TOTAL', op: '=', val: '11' }), '<=11 >9 !=10 =11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '!=', val: '10' }), '<=11 >9 !=10 !=10');
assert.ok(true === f.addable({ key: 'TOTAL', op: '!=', val: '10.5' }), '<=11 >9 !=10 !=10.5');
assert.ok(null === f.addable({ key: 'TOTAL', op: '!=', val: '9' }), '<=11 >9 !=10 !=9');
assert.ok(null === f.addable({ key: 'TOTAL', op: '!=', val: '8' }), '<=11 >9 !=10 !=8');
assert.ok(true === f.addable({ key: 'TOTAL', op: '!=', val: '11' }), '<=11 >9 !=10 !=11');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>', val: 11 }), '<=11 >9 !=10 >11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '>=', val: 11 }), '<=11 >9 !=10 >=11');
assert.ok(true === f.addable({ key: 'TOTAL', op: '<', val: 11 }), '<=11 >9 !=10 <11');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<=', val: 11 }), '<=11 >9 !=10 <=11');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>', val: 90 }), '<=11 >9 !=10 >90');
assert.ok(false === f.addable({ key: 'TOTAL', op: '>=', val: 90 }), '<=11 >9 !=10 >=90');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<', val: 90 }), '<=11 >9 !=10 <90');
assert.ok(null === f.addable({ key: 'TOTAL', op: '<=', val: 90 }), '<=11 >9 !=10 <=90');
assert.ok(null === f.addable({ key: 'TOTAL', op: '>', val: 9 }), '<=11 >9 !=10 >9');
assert.ok(null === f.addable({ key: 'TOTAL', op: '>=', val: 9 }), '<=11 >9 !=10 >=9');
assert.ok(false === f.addable({ key: 'TOTAL', op: '<', val: 9 }), '<=11 >9 !=10 <9');
assert.ok(false === f.addable({ key: 'TOTAL', op: '<=', val: 9 }), '<=11 >9 !=10 <=9');
});
});


it('should add filtersets', function() {
var f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '=', val: '11' });
assert.deepEqual(f.filters, { 'TOTAL=': { key: 'TOTAL', op: '=', val: '11' }});

f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '!=', val: '4' });
assert.deepEqual(f.filters, { 'TOTAL!=4': { key: 'TOTAL', op: '!=', val: '4' }});

f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '>', val: '4' });
assert.deepEqual(f.filters, { 'TOTAL>': { key: 'TOTAL', op: '>', val: '4' }});

f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '>=', val: '4' });
assert.deepEqual(f.filters, { 'TOTAL>=': { key: 'TOTAL', op: '>=', val: '4' }});

f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '<', val: '4' });
assert.deepEqual(f.filters, { 'TOTAL<': { key: 'TOTAL', op: '<', val: '4' }});

f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '<=', val: '4' });
assert.deepEqual(f.filters, { 'TOTAL<=': { key: 'TOTAL', op: '<=', val: '4' }});

f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '!=', val: '9' });
assert.deepEqual(f.filters, { 'TOTAL!=9': { key: 'TOTAL', op: '!=', val: '9' }, 'TOTAL!=11': { key: 'TOTAL', op: '!=', val: '11' }});

f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '>', val: 9 });
assert.deepEqual(f.filters, { 'TOTAL>': { key: 'TOTAL', op: '>', val: 9 }, 'TOTAL!=11': { key: 'TOTAL', op: '!=', val: '11' }});

f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '>', val: 11 });
assert.deepEqual(f.filters, { 'TOTAL>': { key: 'TOTAL', op: '>', val: 11 }});

f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '>', val: 90 });
assert.deepEqual(f.filters, { 'TOTAL>': { key: 'TOTAL', op: '>', val: 90 }});

f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '>=', val: 9 });
assert.deepEqual(f.filters, { 'TOTAL>=': { key: 'TOTAL', op: '>=', val: 9 }, 'TOTAL!=11': { key: 'TOTAL', op: '!=', val: '11' }});

f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '>=', val: 11 });
assert.deepEqual(f.filters, { 'TOTAL>': { key: 'TOTAL', op: '>', val: 11 }});

f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '>=', val: 90 });
assert.deepEqual(f.filters, { 'TOTAL>=': { key: 'TOTAL', op: '>=', val: 90 }});

f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '<', val: 9 });
assert.deepEqual(f.filters, { 'TOTAL<': { key: 'TOTAL', op: '<', val: 9 }});

f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '<', val: 11 });
assert.deepEqual(f.filters, { 'TOTAL<': { key: 'TOTAL', op: '<', val: 11 }});

f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '<', val: 90 });
assert.deepEqual(f.filters, { 'TOTAL<': { key: 'TOTAL', op: '<', val: 90 }, 'TOTAL!=11': { key: 'TOTAL', op: '!=', val: '11' }});

f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '<=', val: 9 });
assert.deepEqual(f.filters, { 'TOTAL<=': { key: 'TOTAL', op: '<=', val: 9 }});

f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '<=', val: 11 });
assert.deepEqual(f.filters, { 'TOTAL<': { key: 'TOTAL', op: '<', val: 11 }});

f = new tree.Filterset();
f.add({ key: 'TOTAL', op: '!=', val: '11' });
f.add({ key: 'TOTAL', op: '<=', val: 90 });
assert.deepEqual(f.filters, { 'TOTAL<=': { key: 'TOTAL', op: '<=', val: 90 }, 'TOTAL!=11': { key: 'TOTAL', op: '!=', val: '11' }});
});
});
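// Editorial note, not part of the diff: the deepEqual assertions above suggest
// how Filterset indexes its contents -- comparison filters appear to be keyed
// by field plus operator ('TOTAL=', 'TOTAL>', 'TOTAL<=', ...), so at most one
// of each survives per field, while '!=' filters are keyed by field, operator
// and value ('TOTAL!=9', 'TOTAL!=11'), so several exclusions can coexist.
// Adding a range filter that already rules out an excluded value seems to drop
// the now-redundant '!=' entry, and '<=11' on top of '!=11' collapses to a
// strict '<11'. A minimal sketch of that reading, restating calls from the
// tests above:
var sketch2 = new tree.Filterset();
sketch2.add({ key: 'TOTAL', op: '!=', val: '11' });
sketch2.add({ key: 'TOTAL', op: '<', val: 9 });
// Per the assertion above, only { 'TOTAL<': { key: 'TOTAL', op: '<', val: 9 } }
// remains: TOTAL<9 already excludes 11, so the '!=' filter is dropped.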
45
test/operation.test.js
Normal file
@ -0,0 +1,45 @@
var assert = require('assert');
var tree = require('../lib/carto/tree.js');
require('../lib/carto/tree/operation');
require('../lib/carto/tree/dimension');
require('../lib/carto/tree/color');
require('../lib/carto/tree/field');
require('../lib/carto/tree/literal');
require('../lib/carto/tree/quoted');

describe('Operation', function() {
    it('should work with percent', function() {
        var env = { ppi: 72, error: function(err) { console.log(err.message); } };

        var o = new tree.Operation("+", [ new tree.Dimension(2), new tree.Dimension(10, "%") ]);
        assert.equal(o.ev(env).value, 2.2);
    });
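    // Editorial note, not part of the diff: the assertion above implies that a
    // '%' dimension scales the other operand rather than acting as a plain
    // number, i.e. 2 + 10% evaluates as 2 * (1 + 10/100) = 2.2 (an inference
    // from this test, not from carto documentation).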

    it('should work with units', function() {
        var env = { ppi: 72, error: function(err) { console.log(err.message); } };

        var o = new tree.Operation("+", [ new tree.Dimension(2.54, 'cm'), new tree.Dimension(0.0254, 'm') ]);
        assert.equal(o.ev(env).value, 144);

        var o = new tree.Operation("+", [ new tree.Dimension(25.4, 'mm'), new tree.Dimension(72, 'pt') ]);
        assert.equal(o.ev(env).value, 144);

        var o = new tree.Operation("+", [ new tree.Dimension(72, 'pt'), new tree.Dimension(6, 'pc') ]);
        assert.equal(o.ev(env).value, 144);
    });

    it('should work with different ppi', function() {
        var env = { ppi: 300, error: function(err) { console.log(err.message); } };

        var o = new tree.Operation("+", [ new tree.Dimension(2.54, 'cm'), new tree.Dimension(0.0254, 'm') ]);
        assert.equal(o.ev(env).value, 600);

        var o = new tree.Operation("+", [ new tree.Dimension(25.4, 'mm'), new tree.Dimension(72, 'pt') ]);
        assert.equal(o.ev(env).value, 600);

        var o = new tree.Operation("+", [ new tree.Dimension(72, 'pt'), new tree.Dimension(6, 'pc') ]);
        assert.equal(o.ev(env).value, 600);
    });
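    // Editorial note, not part of the diff: the two unit tests above are
    // consistent with physical units being normalized through inches using
    // env.ppi -- 2.54cm = 25.4mm = 72pt = 6pc = 1in, and 1in evaluates to ppi
    // pixels. Each sum is therefore 1in + 1in = 2in, giving 2 * 72 = 144 at
    // 72 ppi and 2 * 300 = 600 at 300 ppi (an inference from these tests).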

});
Some files were not shown because too many files have changed in this diff.