commit 3eee130452f61b05eda6e4e7553b88a82430632a Author: Marc Harter Date: Wed May 4 12:19:55 2011 -0500 initial commit diff --git a/dbf.js b/dbf.js new file mode 100644 index 0000000..5e69d47 --- /dev/null +++ b/dbf.js @@ -0,0 +1,213 @@ +(function(window,undefined){ + + if(window.document && window.Worker){ + var worker = new Worker("dbf.js") + + var DBF = function(url, callback){ + var + w = this._worker = worker, + t = this + + w.onmessage = function(e){ + t.data = e.data + if (callback) callback(e.data); + }; + + w.postMessage(url) + } + + window["DBF"] = DBF + return + } + + var IN_WORKER = !window.document + if (IN_WORKER) { + importScripts('stream.js') + onmessage = function(e){ + new DBF(e.data); + }; + } + + var + DBASE_LEVEL = { + "3": "dBASE Level 5", + "4": "dBase Level 7" + }, + DBASE_FIELD_TYPE = { + "N": "Number", + "C": "Character", // binary + "L": "Logical", + "D": "Date", + "M": "Memo", // binary + "F": "Floating point", + "B": "Binary", + "G": "General", + "P": "Picture", + "Y": "Currency", + "T": "DateTime", + "I": "Integer", + "V": "VariField", + "X": "Variant", + "@": "Timestamp", + "O": "Double", + "+": "Autoincrement", // (dBase Level 7) + "O": "Double", // (dBase Level 7) + "@": "Timestamp" // (dBase Level 7) + } + + var DBF = function(url, callback){ + var xhr = new XMLHttpRequest(); + + xhr.open("GET", url, false) + xhr.overrideMimeType("text/plain; charset=x-user-defined") + xhr.send() + + if(200 != xhr.status) + throw "Unable to load " + url + " status: " + xhr.status + + this.stream = new Gordon.Stream(xhr.responseText) + this.callback = callback + + this.readFileHeader() + this.readFieldDescriptions() + this.readRecords() + + this._postMessage() + } + + DBF.prototype = { + constructor: DBF, + _postMessage: function() { + var data = { + header: this.header, + fields: this.fields, + records: this.records + } + if (IN_WORKER) postMessage(data) + else if (this.callback) this.callback(data) + }, + readFileHeader: function(){ + var s = this.stream, + header = this.header = {}, + date = new Date; + + header.version = DBASE_LEVEL[s.readSI8()] + + // Date of last update; in YYMMDD format. Each byte contains the number as a binary. YY is added to a base of 1900 decimal to determine the actual year. Therefore, YY has possible values from 0x00-0xFF, which allows for a range from 1900-2155. + date.setUTCFullYear(1900 + s.readSI8()) + date.setUTCMonth(s.readSI8()) + date.setUTCDate(s.readSI8()) + + header.lastUpdated = date + + // Number of records in file + header.numRecords = s.readSI32() + + // Position of first data record + header.firstRecordPosition = s.readSI16() + + // Length of one data record, including delete flag + header.recordLength = s.readSI16() + + // Reserved; filled with zeros + s.offset += 16 + + /* + Table flags: + 0x01 file has a structural .cdx + 0x02 file has a Memo field + 0x04 file is a database (.dbc) + This byte can contain the sum of any of the above values. For example, the value 0x03 indicates the table has a structural .cdx and a Memo field. + */ + header.flags = s.readSI8() + + // Code page mark + header.codePageMark = s.readSI8() + + // Reserved; filled with zeros. + s.offset += 2 + + }, + readFieldDescriptions: function(){ + var s = this.stream, + fields = [], + field + + while (s.readSI8() != 0x0D) { + s.offset-- + field = {} + + // Field name with a maximum of 10 characters. If less than 10, it is padded with null characters (0x00). 
+ field.name = s.readString(11).replace(/\u0000/g,"") + + field.type = DBASE_FIELD_TYPE[s.readString(1)] + + // Displacement of field in record + field.fieldDisplacement = s.readSI32() + + // Length of field (in bytes) + field.fieldLength = s.readUI8() + + // Number of decimal places + field.decimals = s.readSI8() + + /* + Field flags: + 0x01 System Column (not visible to user) + 0x02 Column can store null values + 0x04 Binary column (for CHAR and MEMO only) + 0x06 (0x02+0x04) When a field is NULL and binary (Integer, Currency, and Character/Memo fields) + 0x0C Column is autoincrementing + */ + field.flags = s.readSI8() + + // Value of autoincrement Next value + field.autoincrementNextValue = s.readSI32() + + // Value of autoincrement Step value + field.autoincrementStepValue = s.readSI8() + + // Reserved + s.offset += 8 + + fields.push(field) + } + + this.fields = fields + + }, + readRecords: function(){ + var s = this.stream, + numRecords = this.header.numRecords, + recordsOffset = this.header.firstRecordPosition, + recordSize = this.header.recordLength, + fields = this.fields, + numFields = fields.length, + records = [], + field, record + + for (var index = 0; index < numRecords; index++) { + s.offset = recordsOffset + index * recordSize + + record = {} + + // Data records begin with a delete flag byte. If this byte is an ASCII space (0x20), the record is not deleted. If the first byte is an asterisk (0x2A), the record is deleted + record._isDeleted = s.readSI8() == 42 + + for(var i = 0; i < numFields; i++){ + field = fields[i] + record[field.name] = s.readString(field.fieldLength).trim(); + } + + records.push(record); + } + + this.records = records + + } + } + + window["DBF"] = DBF; + +})(self) + diff --git a/index.html b/index.html new file mode 100644 index 0000000..238600e --- /dev/null +++ b/index.html @@ -0,0 +1,52 @@ + + + + js-shapefile-to-geojson Demo Page + + + +

js-shapefile-to-geojson Demo Page

+

Pure client-side JavaScript (no server side code) parsing of shapefiles and dbase files to GeoJSON format displayed using OpenLayers.

+
+

View project at http://github.com/wavded/js-shapefile-to-geojson.
+
+
+
+
+
+
+
+
diff --git a/readme.md b/readme.md
new file mode 100644
index 0000000..6a701a4
--- /dev/null
+++ b/readme.md
@@ -0,0 +1,60 @@
+This project allows a user to load Shapefiles and DBFs into the browser with JavaScript.
+It outputs [GeoJSON](http://geojson.org/) for use with other mapping APIs such as [OpenLayers](http://openlayers.org).
+
+Inspired by the excellent work of Tom Carden ([http://github.com/RandomEtc/shapefile-js/](http://github.com/RandomEtc/shapefile-js/)).
+
+### Overview
+
+I just got this out there, so nothing is minified. See index.html for an example of the script load order. All files need to be in the same directory. Web Workers are used if the browser supports them. Not recommended for large files; this is more of an experiment than anything.
+
+### Usage
+
+You can use it to parse shapefiles (.shp), dBase files (.dbf), or both. Here are some examples.
+
+Load Shapefile Only
+
+    var shapefile = new Shapefile("myshapefile.shp", function(data){
+        // data returned
+    });
+
+Load DBF Only
+
+    var dbf = new DBF("mydbf.dbf", function(data){
+        // data returned
+    });
+
+Load Shapefile w/ DBF Attributes
+
+    var shapefile = new Shapefile({
+        shp: "myshape.shp",
+        dbf: "myshape.dbf"
+    }, function(data){
+        // data returned
+    });
+
+Use with OpenLayers
+
+    var
+        parser = new OpenLayers.Format.GeoJSON(),
+        features,
+        shapefile = new Shapefile({
+            shp: "myshape.shp",
+            dbf: "myshape.dbf"
+        }, function(data){
+            features = parser.read(data.geojson);
+        });
+
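+What the callback receives
+
+The object handed to your callback mirrors what _postMessage assembles internally. Roughly (a sketch, not a frozen API; see shapefile.js and dbf.js for the authoritative fields):
+
+    new Shapefile({ shp: "myshape.shp", dbf: "myshape.dbf" }, function(data){
+        data.header;  // .shp file header: version, shapeType, bounds, ...
+        data.records; // raw shape records as parsed from the .shp
+        data.dbf;     // parsed DBF data when a .dbf was given
+        data.geojson; // GeoJSON FeatureCollection, ready for e.g. OpenLayers.Format.GeoJSON
+    });
+
+    new DBF("mydbf.dbf", function(data){
+        data.header;  // DBF header: numRecords, lastUpdated, recordLength, ...
+        data.fields;  // field descriptors (name, type, fieldLength, ...)
+        data.records; // one object per row, keyed by field name
+    });
+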
+### Resources
+
+I used the technical descriptions found here to parse the binary formats:
+
+> [ESRI Shapefile Technical Description - PDF](http://www.esri.com/library/whitepapers/pdfs/shapefile.pdf)
+
+> [dBase (Xbase) File Format Description](http://www.dbf2002.com/dbf-file-format.html)
+
+### Future
+
+I plan to implement (time permitting) some custom renderers such as SVG or Canvas (besides using OpenLayers) to improve the speed.
+
+Feel free to hack at this, submit bugs and pull requests, and make it better. If you write a renderer, please push it back and I'll add it to the project.
+
diff --git a/shapefile.js b/shapefile.js
new file mode 100644
index 0000000..f050d4e
--- /dev/null
+++ b/shapefile.js
@@ -0,0 +1,324 @@
+(function(window,undefined){
+
+    if(window.document && window.Worker){
+        var worker = new Worker("shapefile.js")
+
+        var Shapefile = function(o, callback){
+            var
+                w = this.worker = worker,
+                t = this,
+                o = typeof o == "string" ? {shp: o} : o
+
+            w.onmessage = function(e){
+                t.data = e.data
+                if(callback) callback(e.data)
+            }
+
+            w.postMessage(["Load", o])
+
+            if(o.dbf) this.dbf = new DBF(o.dbf, function(data){
+                w.postMessage(["Add DBF Attributes", data])
+            })
+        }
+
+        window["Shapefile"] = Shapefile
+        return
+    }
+
+    var IN_WORKER = !window.document
+    if (IN_WORKER) {
+        importScripts('stream.js')
+        onmessage = function(e){
+            switch (e.data[0]) {
+                case "Load":
+                    window.shapefile = new Shapefile(e.data[1])
+                    break
+                case "Add DBF Attributes":
+                    window.shapefile.addDBFDataToGeoJSON(e.data[1])
+                    window.shapefile._postMessage()
+                    break
+                default:
+            }
+        };
+    }
+
+    var SHAPE_TYPES = {
+        "0": "Null Shape",
+        "1": "Point", // standard shapes
+        "3": "PolyLine",
+        "5": "Polygon",
+        "8": "MultiPoint",
+        "11": "PointZ", // 3d shapes
+        "13": "PolyLineZ",
+        "15": "PolygonZ",
+        "18": "MultiPointZ",
+        "21": "PointM", // user-defined measurement shapes
+        "23": "PolyLineM",
+        "25": "PolygonM",
+        "28": "MultiPointM",
+        "31": "MultiPatch"
+    }
+
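+    // The parser below runs either on the main thread (when Workers are
+    // unavailable) or inside the worker spawned above. It fetches the .shp
+    // synchronously via XHR, wraps the bytes in a Gordon.Stream, and then walks
+    // the file: readFileHeader() -> readRecords() -> formatIntoGeoJson(). When a
+    // .dbf is supplied (outside a worker), addDBFDataToGeoJSON() merges its rows
+    // into the feature properties before _postMessage() delivers the result.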
+    var Shapefile = function(o, callback){
+        var xhr = new XMLHttpRequest(),
+            that = this,
+            o = typeof o == "string" ? {shp: o} : o
+
+        xhr.open("GET", o.shp, false)
+        xhr.overrideMimeType("text/plain; charset=x-user-defined")
+        xhr.send()
+
+        if(200 != xhr.status)
+            throw "Unable to load " + o.shp + " status: " + xhr.status
+
+        this.url = o.shp
+        this.stream = new Gordon.Stream(xhr.responseText)
+        this.callback = callback
+
+        this.readFileHeader()
+        this.readRecords()
+        this.formatIntoGeoJson()
+
+        if(o.dbf) this.dbf = IN_WORKER ?
+            null :
+            new DBF(o.dbf, function(data){
+                that.addDBFDataToGeoJSON(data)
+                that._postMessage()
+            })
+        else this._postMessage()
+    }
+
+    Shapefile.prototype = {
+        constructor: Shapefile,
+        _postMessage: function() {
+            var data = {
+                header: this.header,
+                records: this.records,
+                dbf: this.dbf,
+                geojson: this.geojson
+            }
+            if (IN_WORKER) postMessage(data)
+            else if (this.callback) this.callback(data)
+        },
+        readFileHeader: function(){
+            var s = this.stream,
+                header = this.header = {}
+
+            // The main file header is fixed at 100 bytes in length
+            if(s.length < 100) throw "Invalid Header Length"
+
+            // File code (always hex value 0x0000270a)
+            header.fileCode = s.readSI32(true)
+
+            if(header.fileCode != 0x0000270a)
+                throw "Invalid File Code"
+
+            // Unused; five uint32
+            s.offset += 4 * 5
+
+            // File length (in 16-bit words, including the header)
+            header.fileLength = s.readSI32(true) * 2
+
+            header.version = s.readSI32()
+
+            header.shapeType = SHAPE_TYPES[s.readSI32()]
+
+            // Minimum bounding rectangle (MBR) of all shapes contained within the
+            // shapefile; four doubles in the following order: min X, min Y, max X, max Y
+            this._readBounds(header)
+
+            // Z axis range
+            header.rangeZ = {
+                min: s.readDouble(),
+                max: s.readDouble()
+            }
+
+            // User defined measurement range
+            header.rangeM = {
+                min: s.readDouble(),
+                max: s.readDouble()
+            }
+        },
+        readRecords: function(){
+            var s = this.stream,
+                records = this.records = [],
+                record
+
+            do {
+                record = {}
+
+                // Record number (1-based)
+                record.id = s.readSI32(true)
+
+                if(record.id == 0) break // no more records
+
+                // Record length (in 16-bit words)
+                record.length = s.readSI32(true) * 2
+
+                record.shapeType = SHAPE_TYPES[s.readSI32()]
+
+                // Read specific shape
+                this["_read" + record.shapeType](record);
+
+                records.push(record);
+            } while(true);
+        },
+        _readBounds: function(object){
+            var s = this.stream
+
+            object.bounds = {
+                left: s.readDouble(),
+                bottom: s.readDouble(),
+                right: s.readDouble(),
+                top: s.readDouble()
+            }
+
+            return object
+        },
+        _readParts: function(record){
+            var s = this.stream,
+                nparts,
+                parts = []
+
+            nparts = record.numParts = s.readSI32()
+
+            // the number of points always follows the number of parts, so capture it now
+            record.numPoints = s.readSI32()
+
+            // the parts array holds the point index at which each part starts
+            while(nparts--) parts.push(s.readSI32())
+
+            record.parts = parts
+
+            return record
+        },
+        _readPoint: function(record){
+            var s = this.stream
+
+            record.x = s.readDouble()
+            record.y = s.readDouble()
+
+            return record
+        },
+        _readPoints: function(record){
+            var s = this.stream,
+                points = [],
+                npoints = record.numPoints || (record.numPoints = s.readSI32())
+
+            while(npoints--)
+                points.push({
+                    x: s.readDouble(),
+                    y: s.readDouble()
+                })
+
+            record.points = points
+
+            return record
+        },
+        _readMultiPoint: function(record){
+            this._readBounds(record)
+            this._readPoints(record)
+
+            return record
+        },
+        _readPolygon: function(record){
+            this._readBounds(record)
+            this._readParts(record)
+            this._readPoints(record)
+
+            return record
+        },
+        _readPolyLine: function(record){
+            return this._readPolygon(record);
+        },
+        formatIntoGeoJson: function(){
+            var bounds = this.header.bounds,
+                records = this.records,
+                features = [],
+                feature, geometry, points, fbounds, gcoords, parts, point,
+                geojson = {}
+
+            geojson.type = "FeatureCollection"
+            geojson.bbox = [
+                bounds.left,
+                bounds.bottom,
+                bounds.right,
+                bounds.top
+            ]
+            geojson.features = features
+
+            for (var r = 0, record; record = records[r]; r++){
+                feature = {}, fbounds = record.bounds, points = record.points, parts = record.parts
+                feature.type = "Feature"
+
+                // Point records carry no bounding box of their own
+                if (fbounds) feature.bbox = [
+                    fbounds.left,
+                    fbounds.bottom,
+                    fbounds.right,
+                    fbounds.top
+                ]
+
+                geometry = feature.geometry = {}
+
+                switch (record.shapeType) {
+                    case "Point":
+                        geometry.type = "Point"
+                        // _readPoint stores the coordinates directly on the record
+                        geometry.coordinates = [ record.x, record.y ]
+                        break
+                    case "MultiPoint":
+                    case "PolyLine":
+                        geometry.type = (record.shapeType == "PolyLine" ? "LineString" : "MultiPoint")
+                        gcoords = geometry.coordinates = []
+
+                        for (var p = 0; p < points.length; p++){
+                            var point = points[p]
+                            gcoords.push([point.x, point.y])
+                        }
+                        break
+                    case "Polygon":
+                        geometry.type = "Polygon"
+                        gcoords = geometry.coordinates = []
+
+                        for (var pt = 0; pt < parts.length; pt++){
+                            var partIndex = parts[pt],
+                                part = [],
+                                point
+
+                            // partIndex 0 == main poly, partIndex > 0 == holes in poly
+                            for (var p = partIndex; p < (parts[pt+1] || points.length); p++){
+                                point = points[p]
+                                part.push([point.x, point.y])
+                            }
+                            gcoords.push(part)
+                        }
+                        break
+                    default:
+                }
+                features.push(feature)
+            }
+            this.geojson = geojson
+
+            if(this._addDataAfterLoad) this.addDBFDataToGeoJSON(this._addDataAfterLoad);
+        },
+        addDBFDataToGeoJSON: function(dbfData){
+            if(!this.geojson) return (this._addDataAfterLoad = dbfData)
+
+            this.dbf = dbfData
+
+            var features = this.geojson.features,
+                len = features.length,
+                records = dbfData.records
+
+            while(len--) features[len].properties = records[len]
+        }
+    }
+
+    window["Shapefile"] = Shapefile;
+})(self)
+
diff --git a/stream.js b/stream.js
new file mode 100644
index 0000000..9666ddf
--- /dev/null
+++ b/stream.js
@@ -0,0 +1,486 @@
+/*
+    Stream Reader from Gordon.JS
+    Copyright (c) 2010 Tobias Schneider
+
+    Permission is hereby granted, free of charge, to any person obtaining a copy
+    of this software and associated documentation files (the "Software"), to deal
+    in the Software without restriction, including without limitation the rights
+    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+    copies of the Software, and to permit persons to whom the Software is
+    furnished to do so, subject to the following conditions:
+
+    The above copyright notice and this permission notice shall be included in
+    all copies or substantial portions of the Software.
+
+    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+    THE SOFTWARE.
+*/ + +var win = self, + doc = win.document, + fromCharCode = String.fromCharCode, + push = Array.prototype.push, + min = Math.min, + max = Math.max; + +(function(window,undefined){ + + window.Gordon = {}; + + var DEFLATE_CODE_LENGTH_ORDER = [16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15], + DEFLATE_CODE_LENGHT_MAP = [ + [0, 3], [0, 4], [0, 5], [0, 6], [0, 7], [0, 8], [0, 9], [0, 10], [1, 11], [1, 13], [1, 15], [1, 17], + [2, 19], [2, 23], [2, 27], [2, 31], [3, 35], [3, 43], [3, 51], [3, 59], [4, 67], [4, 83], [4, 99], + [4, 115], [5, 131], [5, 163], [5, 195], [5, 227], [0, 258] + ], + DEFLATE_DISTANCE_MAP = [ + [0, 1], [0, 2], [0, 3], [0, 4], [1, 5], [1, 7], [2, 9], [2, 13], [3, 17], [3, 25], [4, 33], [4, 49], + [5, 65], [5, 97], [6, 129], [6, 193], [7, 257], [7, 385], [8, 513], [8, 769], [9, 1025], [9, 1537], + [10, 2049], [10, 3073], [11, 4097], [11, 6145], [12, 8193], [12, 12289], [13, 16385], [13, 24577] + ]; + + Gordon.Stream = function(data){ + var buff = [], + t = this, + i = t.length = data.length; + t.offset = 0; + for(var i = 0; data[i]; i++){ buff.push(fromCharCode(data.charCodeAt(i) & 0xff)); } + t._buffer = buff.join(''); + t._bitBuffer = null; + t._bitOffset = 8; + }; + Gordon.Stream.prototype = { + readByteAt: function(pos){ + return this._buffer.charCodeAt(pos); + }, + + readNumber: function(numBytes, bigEnd){ + var t = this, + val = 0; + if(bigEnd){ + while(numBytes--){ val = (val << 8) | t.readByteAt(t.offset++); } + }else{ + var o = t.offset, + i = o + numBytes; + while(i > o){ val = (val << 8) | t.readByteAt(--i); } + t.offset += numBytes; + } + t.align(); + return val; + }, + + readSNumber: function(numBytes, bigEnd){ + var val = this.readNumber(numBytes, bigEnd), + numBits = numBytes * 8; + if(val >> (numBits - 1)){ val -= Math.pow(2, numBits); } + return val; + }, + + readSI8: function(){ + return this.readSNumber(1); + }, + + readSI16: function(bigEnd){ + return this.readSNumber(2, bigEnd); + }, + + readSI32: function(bigEnd){ + return this.readSNumber(4, bigEnd); + }, + + readUI8: function(){ + return this.readNumber(1); + }, + + readUI16: function(bigEnd){ + return this.readNumber(2, bigEnd); + }, + + readUI24: function(bigEnd){ + return this.readNumber(3, bigEnd); + }, + + readUI32: function(bigEnd){ + return this.readNumber(4, bigEnd); + }, + + readFixed: function(){ + return this._readFixedPoint(32, 16); + }, + + _readFixedPoint: function(numBits, precision){ + return this.readSB(numBits) * Math.pow(2, -precision); + }, + + readFixed8: function(){ + return this._readFixedPoint(16, 8); + }, + + readFloat: function(){ + return this._readFloatingPoint(8, 23); + }, + + _readFloatingPoint: function(numEBits, numSBits){ + var numBits = 1 + numEBits + numSBits, + numBytes = numBits / 8, + t = this, + val = 0.0; + if(numBytes > 4){ + var i = Math.ceil(numBytes / 4); + while(i--){ + var buff = [], + o = t.offset, + j = o + (numBytes >= 4 ? 
4 : numBytes % 4); + while(j > o){ + buff.push(t.readByteAt(--j)); + numBytes--; + t.offset++; + } + } + var s = new Gordon.Stream(fromCharCode.apply(String, buff)), + sign = s.readUB(1), + expo = s.readUB(numEBits), + mantis = 0, + i = numSBits; + while(i--){ + if(s.readBool()){ mantis += Math.pow(2, i); } + } + }else{ + var sign = t.readUB(1), + expo = t.readUB(numEBits), + mantis = t.readUB(numSBits); + } + if(sign || expo || mantis){ + var maxExpo = Math.pow(2, numEBits), + bias = ~~((maxExpo - 1) / 2), + scale = Math.pow(2, numSBits), + fract = mantis / scale; + if(bias){ + if(bias < maxExpo){ val = Math.pow(2, expo - bias) * (1 + fract); } + else if(fract){ val = NaN; } + else{ val = Infinity; } + }else if(fract){ val = Math.pow(2, 1 - bias) * fract; } + if(NaN != val && sign){ val *= -1; } + } + return val; + }, + + readFloat16: function(){ + return this._readFloatingPoint(5, 10); + }, + + readDouble: function(){ + return this._readFloatingPoint(11, 52); + }, + + readEncodedU32: function(){ + var val = 0; + for(var i = 0; i < 5; i++){ + var num = this.readByteAt(this._offset++); + val = val | ((num & 0x7f) << (7 * i)); + if(!(num & 0x80)){ break; } + } + return val; + }, + + readSB: function(numBits){ + var val = this.readUB(numBits); + if(val >> (numBits - 1)){ val -= Math.pow(2, numBits); } + return val; + }, + + readUB: function(numBits, lsb){ + var t = this, + val = 0; + for(var i = 0; i < numBits; i++){ + if(8 == t._bitOffset){ + t._bitBuffer = t.readUI8(); + t._bitOffset = 0; + } + if(lsb){ val |= (t._bitBuffer & (0x01 << t._bitOffset++) ? 1 : 0) << i; } + else{ val = (val << 1) | (t._bitBuffer & (0x80 >> t._bitOffset++) ? 1 : 0); } + } + return val; + }, + + readFB: function(numBits){ + return this._readFixedPoint(numBits, 16); + }, + + readString: function(numChars){ + var t = this, + b = t._buffer; + if(undefined != numChars){ + var str = b.substr(t.offset, numChars); + t.offset += numChars; + }else{ + var chars = [], + i = t.length - t.offset; + while(i--){ + var code = t.readByteAt(t.offset++); + if(code){ chars.push(fromCharCode(code)); } + else{ break; } + } + var str = chars.join(''); + } + return str; + }, + + readBool: function(numBits){ + return !!this.readUB(numBits || 1); + }, + + seek: function(offset, absolute){ + var t = this; + t.offset = (absolute ? 
0 : t.offset) + offset; + t.align(); + return t; + }, + + align: function(){ + this._bitBuffer = null; + this._bitOffset = 8; + return this; + }, + + readLanguageCode: function(){ + return this.readUI8(); + }, + + readRGB: function(){ + return { + red: this.readUI8(), + green: this.readUI8(), + blue: this.readUI8() + } + }, + + readRGBA: function(){ + var rgba = this.readRGB(); + rgba.alpha = this.readUI8() / 255; + return rgba; + }, + + readARGB: function(){ + var alpha = this.readUI8() / 255, + rgba = this.readRGB(); + rgba.alpha = alpha; + return rgba; + }, + + readRect: function(){ + var t = this; + numBits = t.readUB(5), + rect = { + left: t.readSB(numBits), + right: t.readSB(numBits), + top: t.readSB(numBits), + bottom: t.readSB(numBits) + }; + t.align(); + return rect; + }, + + readMatrix: function(){ + var t = this, + hasScale = t.readBool(); + if(hasScale){ + var numBits = t.readUB(5), + scaleX = t.readFB(numBits), + scaleY = t.readFB(numBits); + }else{ var scaleX = scaleY = 1.0; } + var hasRotation = t.readBool(); + if(hasRotation){ + var numBits = t.readUB(5), + skewX = t.readFB(numBits), + skewY = t.readFB(numBits); + }else{ var skewX = skewY = 0.0; } + var numBits = t.readUB(5); + matrix = { + scaleX: scaleX, scaleY: scaleY, + skewX: skewX, skewY: skewY, + moveX: t.readSB(numBits), moveY: t.readSB(numBits) + }; + t.align(); + return matrix; + }, + + readCxform: function(){ + return this._readCxf(); + }, + + readCxformA: function(){ + return this._readCxf(true); + }, + + _readCxf: function(withAlpha){ + var t = this; + hasAddTerms = t.readBool(), + hasMultTerms = t.readBool(), + numBits = t.readUB(4); + if(hasMultTerms){ + var multR = t.readSB(numBits) / 256, + multG = t.readSB(numBits) / 256, + multB = t.readSB(numBits) / 256, + multA = withAlpha ? t.readSB(numBits) / 256 : 1; + }else{ var multR = multG = multB = multA = 1; } + if(hasAddTerms){ + var addR = t.readSB(numBits), + addG = t.readSB(numBits), + addB = t.readSB(numBits), + addA = withAlpha ? 
t.readSB(numBits) / 256 : 0; + }else{ var addR = addG = addB = addA = 0; } + var cxform = { + multR: multR, multG: multG, multB: multB, multA: multA, + addR: addR, addG: addG, addB: addB, addA: addA + } + t.align(); + return cxform; + }, + + decompress: function(){ + var t = this, + b = t._buffer, + o = t.offset, + data = b.substr(0, o) + t.unzip(); + t.length = data.length; + t.offset = o; + t._buffer = data; + return t; + }, + + unzip: function uz(raw){ + var t = this, + buff = [], + o = DEFLATE_CODE_LENGTH_ORDER, + m = DEFLATE_CODE_LENGHT_MAP, + d = DEFLATE_DISTANCE_MAP; + t.seek(2); + do{ + var isFinal = t.readUB(1, true), + type = t.readUB(2, true); + if(type){ + if(1 == type){ + var distTable = uz.fixedDistTable, + litTable = uz.fixedLitTable; + if(!distTable){ + var bitLengths = []; + for(var i = 0; i < 32; i++){ bitLengths.push(5); } + distTable = uz.fixedDistTable = buildHuffTable(bitLengths); + } + if(!litTable){ + var bitLengths = []; + for(var i = 0; i <= 143; i++){ bitLengths.push(8); } + for(; i <= 255; i++){ bitLengths.push(9); } + for(; i <= 279; i++){ bitLengths.push(7); } + for(; i <= 287; i++){ bitLengths.push(8); } + litTable = uz.fixedLitTable = buildHuffTable(bitLengths); + } + }else{ + var numLitLengths = t.readUB(5, true) + 257, + numDistLengths = t.readUB(5, true) + 1, + numCodeLenghts = t.readUB(4, true) + 4, + codeLengths = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; + for(var i = 0; i < numCodeLenghts; i++){ codeLengths[o[i]] = t.readUB(3, true); } + var codeTable = buildHuffTable(codeLengths), + litLengths = [], + prevCodeLen = 0, + maxLengths = numLitLengths + numDistLengths; + while(litLengths.length < maxLengths){ + var sym = decodeSymbol(t, codeTable); + switch(sym){ + case 16: + var i = t.readUB(2, true) + 3; + while(i--){ litLengths.push(prevCodeLen); } + break; + case 17: + var i = t.readUB(3, true) + 3; + while(i--){ litLengths.push(0); } + break; + case 18: + var i = t.readUB(7, true) + 11; + while(i--){ litLengths.push(0); } + break; + default: + if(sym <= 15){ + litLengths.push(sym); + prevCodeLen = sym; + } + } + } + var distTable = buildHuffTable(litLengths.splice(numLitLengths, numDistLengths)), + litTable = buildHuffTable(litLengths); + } + do{ + var sym = decodeSymbol(t, litTable); + if(sym < 256){ buff.push(raw ? sym : fromCharCode(sym)); } + else if(sym > 256){ + var lengthMap = m[sym - 257], + len = lengthMap[1] + t.readUB(lengthMap[0], true), + distMap = d[decodeSymbol(t, distTable)], + dist = distMap[1] + t.readUB(distMap[0], true), + i = buff.length - dist; + while(len--){ buff.push(buff[i++]); } + } + }while(256 != sym); + }else{ + t.align(); + var len = t.readUI16(), + nlen = t.readUI16(); + if(raw){ while(len--){ buff.push(t.readUI8()); } } + else{ buff.push(t.readString(len)); } + } + }while(!isFinal); + t.seek(4); + return raw ? 
buff : buff.join(''); + } + }; + + function buildHuffTable(bitLengths){ + var numLengths = bitLengths.length, + blCount = [], + maxBits = max.apply(Math, bitLengths), + nextCode = [], + code = 0, + table = {}, + i = numLengths; + while(i--){ + var len = bitLengths[i]; + blCount[len] = (blCount[len] || 0) + (len > 0); + } + for(var i = 1; i <= maxBits; i++){ + var len = i - 1; + if(undefined == blCount[len]){ blCount[len] = 0; } + code = (code + blCount[i - 1]) << 1; + nextCode[i] = code; + } + for(var i = 0; i < numLengths; i++){ + var len = bitLengths[i]; + if(len){ + table[nextCode[len]] = { + length: len, + symbol: i + }; + nextCode[len]++; + } + } + return table; + } + + function decodeSymbol(s, table) { + var code = 0, + len = 0; + while(true){ + code = (code << 1) | s.readUB(1, true); + len++; + var entry = table[code]; + if(undefined != entry && entry.length == len){ return entry.symbol } + } + } +})(this); + diff --git a/testdata/world.dbf b/testdata/world.dbf new file mode 100644 index 0000000..900b271 Binary files /dev/null and b/testdata/world.dbf differ diff --git a/testdata/world.shp b/testdata/world.shp new file mode 100644 index 0000000..de9a3a6 Binary files /dev/null and b/testdata/world.shp differ
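A note on byte order for anyone poking at the binary parsing above: the .shp file code, file length, record numbers, and record lengths are stored big-endian, while most other values (and the whole .dbf) are little-endian, which is why shapefile.js calls readSI32(true) for the former and plain readSI32() for the latter. The sketch below is illustrative only -- readUint and readInt are hypothetical helpers, not functions exported by stream.js -- but it mirrors how Gordon.Stream's readNumber/readSNumber interpret the same bytes in either order.

    // Illustrative sketch only; not part of the library.
    function readUint(bytes, offset, numBytes, bigEnd) {
        var val = 0;
        if (bigEnd) {
            // big-endian: most significant byte first (.shp file code, record ids, lengths)
            for (var i = 0; i < numBytes; i++) val = val * 256 + bytes[offset + i];
        } else {
            // little-endian: least significant byte first (.shp geometry, DBF header)
            for (var i = numBytes - 1; i >= 0; i--) val = val * 256 + bytes[offset + i];
        }
        return val;
    }

    function readInt(bytes, offset, numBytes, bigEnd) {
        var val = readUint(bytes, offset, numBytes, bigEnd),
            max = Math.pow(2, numBytes * 8);
        // two's complement: values with the sign bit set are negative
        return val >= max / 2 ? val - max : val;
    }

    // The shapefile file code 0x0000270a (9994) is stored big-endian:
    // readInt([0x00, 0x00, 0x27, 0x0a], 0, 4, true) === 9994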