0) {
+ if(p < this.DB && (d = this[i]>>p) > 0) { m = true; r = int2char(d); }
+ while(i >= 0) {
+ if(p < k) {
+ d = (this[i]&((1<<p)-1))<<(k-p);
+ d |= this[--i]>>(p+=this.DB-k);
+ }
+ else {
+ d = (this[i]>>(p-=k))&km;
+ if(p <= 0) { p += this.DB; --i; }
+ }
+ if(d > 0) m = true;
+ if(m) r += int2char(d);
+ }
+ }
+ return m?r:"0";
+ }
+
+ // (public) -this
+ function bnNegate() { var r = nbi(); BigInteger.ZERO.subTo(this,r); return r; }
+
+ // (public) |this|
+ function bnAbs() { return (this.s<0)?this.negate():this; }
+
+ // (public) return + if this > a, - if this < a, 0 if equal
+ function bnCompareTo(a) {
+ var r = this.s-a.s;
+ if(r != 0) return r;
+ var i = this.t;
+ r = i-a.t;
+ if(r != 0) return (this.s<0)?-r:r;
+ while(--i >= 0) if((r=this[i]-a[i]) != 0) return r;
+ return 0;
+ }
+
+ // returns bit length of the integer x
+ function nbits(x) {
+ var r = 1, t;
+ if((t=x>>>16) != 0) { x = t; r += 16; }
+ if((t=x>>8) != 0) { x = t; r += 8; }
+ if((t=x>>4) != 0) { x = t; r += 4; }
+ if((t=x>>2) != 0) { x = t; r += 2; }
+ if((t=x>>1) != 0) { x = t; r += 1; }
+ return r;
+ }
+
+ // (public) return the number of bits in "this"
+ function bnBitLength() {
+ if(this.t <= 0) return 0;
+ return this.DB*(this.t-1)+nbits(this[this.t-1]^(this.s&this.DM));
+ }
+
+ // (protected) r = this << n*DB
+ function bnpDLShiftTo(n,r) {
+ var i;
+ for(i = this.t-1; i >= 0; --i) r[i+n] = this[i];
+ for(i = n-1; i >= 0; --i) r[i] = 0;
+ r.t = this.t+n;
+ r.s = this.s;
+ }
+
+ // (protected) r = this >> n*DB
+ function bnpDRShiftTo(n,r) {
+ for(var i = n; i < this.t; ++i) r[i-n] = this[i];
+ r.t = Math.max(this.t-n,0);
+ r.s = this.s;
+ }
+
+ // (protected) r = this << n
+ function bnpLShiftTo(n,r) {
+ var bs = n%this.DB;
+ var cbs = this.DB-bs;
+ var bm = (1<<cbs)-1;
+ var ds = Math.floor(n/this.DB), c = (this.s<<bs)&this.DM, i;
+ for(i = this.t-1; i >= 0; --i) {
+ r[i+ds+1] = (this[i]>>cbs)|c;
+ c = (this[i]&bm)<<bs;
+ }
+ for(i = ds-1; i >= 0; --i) r[i] = 0;
+ r[ds] = c;
+ r.t = this.t+ds+1;
+ r.s = this.s;
+ r.clamp();
+ }
+
+ // (protected) r = this >> n
+ function bnpRShiftTo(n,r) {
+ r.s = this.s;
+ var ds = Math.floor(n/this.DB);
+ if(ds >= this.t) { r.t = 0; return; }
+ var bs = n%this.DB;
+ var cbs = this.DB-bs;
+ var bm = (1<<bs)-1;
+ r[0] = this[ds]>>bs;
+ for(var i = ds+1; i < this.t; ++i) {
+ r[i-ds-1] |= (this[i]&bm)<<cbs;
+ r[i-ds] = this[i]>>bs;
+ }
+ if(bs > 0) r[this.t-ds-1] |= (this.s&bm)<<cbs;
+ r.t = this.t-ds;
+ r.clamp();
+ }
+
+ // (protected) r = this - a
+ function bnpSubTo(a,r) {
+ var i = 0, c = 0, m = Math.min(a.t,this.t);
+ while(i < m) {
+ c += this[i]-a[i];
+ r[i++] = c&this.DM;
+ c >>= this.DB;
+ }
+ if(a.t < this.t) {
+ c -= a.s;
+ while(i < this.t) {
+ c += this[i];
+ r[i++] = c&this.DM;
+ c >>= this.DB;
+ }
+ c += this.s;
+ }
+ else {
+ c += this.s;
+ while(i < a.t) {
+ c -= a[i];
+ r[i++] = c&this.DM;
+ c >>= this.DB;
+ }
+ c -= a.s;
+ }
+ r.s = (c<0)?-1:0;
+ if(c < -1) r[i++] = this.DV+c;
+ else if(c > 0) r[i++] = c;
+ r.t = i;
+ r.clamp();
+ }
+
+ // (protected) r = this * a, r != this,a (HAC 14.12)
+ // "this" should be the larger one if appropriate.
+ function bnpMultiplyTo(a,r) {
+ var x = this.abs(), y = a.abs();
+ var i = x.t;
+ r.t = i+y.t;
+ while(--i >= 0) r[i] = 0;
+ for(i = 0; i < y.t; ++i) r[i+x.t] = x.am(0,y[i],r,i,0,x.t);
+ r.s = 0;
+ r.clamp();
+ if(this.s != a.s) BigInteger.ZERO.subTo(r,r);
+ }
+
+ // (protected) r = this^2, r != this (HAC 14.16)
+ function bnpSquareTo(r) {
+ var x = this.abs();
+ var i = r.t = 2*x.t;
+ while(--i >= 0) r[i] = 0;
+ for(i = 0; i < x.t-1; ++i) {
+ var c = x.am(i,x[i],r,2*i,0,1);
+ if((r[i+x.t]+=x.am(i+1,2*x[i],r,2*i+1,c,x.t-i-1)) >= x.DV) {
+ r[i+x.t] -= x.DV;
+ r[i+x.t+1] = 1;
+ }
+ }
+ if(r.t > 0) r[r.t-1] += x.am(i,x[i],r,2*i,0,1);
+ r.s = 0;
+ r.clamp();
+ }
+
+ // (protected) divide this by m, quotient and remainder to q, r (HAC 14.20)
+ // r != q, this != m. q or r may be null.
+ function bnpDivRemTo(m,q,r) {
+ var pm = m.abs();
+ if(pm.t <= 0) return;
+ var pt = this.abs();
+ if(pt.t < pm.t) {
+ if(q != null) q.fromInt(0);
+ if(r != null) this.copyTo(r);
+ return;
+ }
+ if(r == null) r = nbi();
+ var y = nbi(), ts = this.s, ms = m.s;
+ var nsh = this.DB-nbits(pm[pm.t-1]); // normalize modulus
+ if(nsh > 0) { pm.lShiftTo(nsh,y); pt.lShiftTo(nsh,r); }
+ else { pm.copyTo(y); pt.copyTo(r); }
+ var ys = y.t;
+ var y0 = y[ys-1];
+ if(y0 == 0) return;
+ var yt = y0*(1<<this.F1)+((ys>1)?y[ys-2]>>this.F2:0);
+ var d1 = this.FV/yt, d2 = (1<<this.F1)/yt, e = 1<<this.F2;
+ var i = r.t, j = i-ys, t = (q==null)?nbi():q;
+ y.dlShiftTo(j,t);
+ if(r.compareTo(t) >= 0) {
+ r[r.t++] = 1;
+ r.subTo(t,r);
+ }
+ BigInteger.ONE.dlShiftTo(ys,t);
+ t.subTo(y,y); // "negative" y so we can replace sub with am later
+ while(y.t < ys) y[y.t++] = 0;
+ while(--j >= 0) {
+ // Estimate quotient digit
+ var qd = (r[--i]==y0)?this.DM:Math.floor(r[i]*d1+(r[i-1]+e)*d2);
+ if((r[i]+=y.am(0,qd,r,j,0,ys)) < qd) { // Try it out
+ y.dlShiftTo(j,t);
+ r.subTo(t,r);
+ while(r[i] < --qd) r.subTo(t,r);
+ }
+ }
+ if(q != null) {
+ r.drShiftTo(ys,q);
+ if(ts != ms) BigInteger.ZERO.subTo(q,q);
+ }
+ r.t = ys;
+ r.clamp();
+ if(nsh > 0) r.rShiftTo(nsh,r); // Denormalize remainder
+ if(ts < 0) BigInteger.ZERO.subTo(r,r);
+ }
+
+ // (public) this mod a
+ function bnMod(a) {
+ var r = nbi();
+ this.abs().divRemTo(a,null,r);
+ if(this.s < 0 && r.compareTo(BigInteger.ZERO) > 0) a.subTo(r,r);
+ return r;
+ }
+
+ // Modular reduction using "classic" algorithm
+ function Classic(m) { this.m = m; }
+ function cConvert(x) {
+ if(x.s < 0 || x.compareTo(this.m) >= 0) return x.mod(this.m);
+ else return x;
+ }
+ function cRevert(x) { return x; }
+ function cReduce(x) { x.divRemTo(this.m,null,x); }
+ function cMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }
+ function cSqrTo(x,r) { x.squareTo(r); this.reduce(r); }
+
+ Classic.prototype.convert = cConvert;
+ Classic.prototype.revert = cRevert;
+ Classic.prototype.reduce = cReduce;
+ Classic.prototype.mulTo = cMulTo;
+ Classic.prototype.sqrTo = cSqrTo;
+
+ // (protected) return "-1/this % 2^DB"; useful for Mont. reduction
+ // justification:
+ // xy == 1 (mod m)
+ // xy = 1+km
+ // xy(2-xy) = (1+km)(1-km)
+ // x[y(2-xy)] = 1-k^2m^2
+ // x[y(2-xy)] == 1 (mod m^2)
+ // if y is 1/x mod m, then y(2-xy) is 1/x mod m^2
+ // should reduce x and y(2-xy) by m^2 at each step to keep size bounded.
+ // JS multiply "overflows" differently from C/C++, so care is needed here.
+ function bnpInvDigit() {
+ if(this.t < 1) return 0;
+ var x = this[0];
+ if((x&1) == 0) return 0;
+ var y = x&3; // y == 1/x mod 2^2
+ y = (y*(2-(x&0xf)*y))&0xf; // y == 1/x mod 2^4
+ y = (y*(2-(x&0xff)*y))&0xff; // y == 1/x mod 2^8
+ y = (y*(2-(((x&0xffff)*y)&0xffff)))&0xffff; // y == 1/x mod 2^16
+ // last step - calculate inverse mod DV directly;
+ // assumes 16 < DB <= 32 and assumes ability to handle 48-bit ints
+ y = (y*(2-x*y%this.DV))%this.DV; // y == 1/x mod 2^dbits
+ // we really want the negative inverse, and -DV < y < DV
+ return (y>0)?this.DV-y:-y;
+ }
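+
+ // Worked sketch of the iteration above: each Newton step y = y*(2-x*y)
+ // doubles the number of correct low-order bits of the inverse. For x = 3,
+ // y = 3 is 1/x mod 4 (3*3 = 9 == 1 mod 4); one step gives
+ // y = (y*(2-(x&0xf)*y))&0xf = (3*(2-9))&0xf = (-21)&0xf = 11, and
+ // 3*11 = 33 == 1 (mod 16). The negation at the end is what montReduce()
+ // relies on, so that m[0]*mp == -1 (mod 2^DB) for the modulus m passed
+ // to Montgomery().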
+
+ // Montgomery reduction
+ function Montgomery(m) {
+ this.m = m;
+ this.mp = m.invDigit();
+ this.mpl = this.mp&0x7fff;
+ this.mph = this.mp>>15;
+ this.um = (1<<(m.DB-15))-1;
+ this.mt2 = 2*m.t;
+ }
+
+ // xR mod m
+ function montConvert(x) {
+ var r = nbi();
+ x.abs().dlShiftTo(this.m.t,r);
+ r.divRemTo(this.m,null,r);
+ if(x.s < 0 && r.compareTo(BigInteger.ZERO) > 0) this.m.subTo(r,r);
+ return r;
+ }
+
+ // x/R mod m
+ function montRevert(x) {
+ var r = nbi();
+ x.copyTo(r);
+ this.reduce(r);
+ return r;
+ }
+
+ // x = x/R mod m (HAC 14.32)
+ function montReduce(x) {
+ while(x.t <= this.mt2) // pad x so am has enough room later
+ x[x.t++] = 0;
+ for(var i = 0; i < this.m.t; ++i) {
+ // faster way of calculating u0 = x[i]*mp mod DV
+ var j = x[i]&0x7fff;
+ var u0 = (j*this.mpl+(((j*this.mph+(x[i]>>15)*this.mpl)&this.um)<<15))&x.DM;
+ // use am to combine the multiply-shift-add into one call
+ j = i+this.m.t;
+ x[j] += this.m.am(0,u0,x,i,0,this.m.t);
+ // propagate carry
+ while(x[j] >= x.DV) { x[j] -= x.DV; x[++j]++; }
+ }
+ x.clamp();
+ x.drShiftTo(this.m.t,x);
+ if(x.compareTo(this.m) >= 0) x.subTo(this.m,x);
+ }
+
+ // r = "x^2/R mod m"; x != r
+ function montSqrTo(x,r) { x.squareTo(r); this.reduce(r); }
+
+ // r = "xy/R mod m"; x,y != r
+ function montMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }
+
+ Montgomery.prototype.convert = montConvert;
+ Montgomery.prototype.revert = montRevert;
+ Montgomery.prototype.reduce = montReduce;
+ Montgomery.prototype.mulTo = montMulTo;
+ Montgomery.prototype.sqrTo = montSqrTo;
+
+ // (protected) true iff this is even
+ function bnpIsEven() { return ((this.t>0)?(this[0]&1):this.s) == 0; }
+
+ // (protected) this^e, e < 2^32, doing sqr and mul with "r" (HAC 14.79)
+ function bnpExp(e,z) {
+ if(e > 0xffffffff || e < 1) return BigInteger.ONE;
+ var r = nbi(), r2 = nbi(), g = z.convert(this), i = nbits(e)-1;
+ g.copyTo(r);
+ while(--i >= 0) {
+ z.sqrTo(r,r2);
+ if((e&(1<<i)) > 0) z.mulTo(r2,g,r);
+ else { var t = r; r = r2; r2 = t; }
+ }
+ return z.revert(r);
+ }
+
+ // (public) this^e % m, 0 <= e < 2^32
+ function bnModPowInt(e,m) {
+ var z;
+ if(e < 256 || m.isEven()) z = new Classic(m); else z = new Montgomery(m);
+ return this.exp(e,z);
+ }
+
+ // protected
+ BigInteger.prototype.copyTo = bnpCopyTo;
+ BigInteger.prototype.fromInt = bnpFromInt;
+ BigInteger.prototype.fromString = bnpFromString;
+ BigInteger.prototype.clamp = bnpClamp;
+ BigInteger.prototype.dlShiftTo = bnpDLShiftTo;
+ BigInteger.prototype.drShiftTo = bnpDRShiftTo;
+ BigInteger.prototype.lShiftTo = bnpLShiftTo;
+ BigInteger.prototype.rShiftTo = bnpRShiftTo;
+ BigInteger.prototype.subTo = bnpSubTo;
+ BigInteger.prototype.multiplyTo = bnpMultiplyTo;
+ BigInteger.prototype.squareTo = bnpSquareTo;
+ BigInteger.prototype.divRemTo = bnpDivRemTo;
+ BigInteger.prototype.invDigit = bnpInvDigit;
+ BigInteger.prototype.isEven = bnpIsEven;
+ BigInteger.prototype.exp = bnpExp;
+
+ // public
+ BigInteger.prototype.toString = bnToString;
+ BigInteger.prototype.negate = bnNegate;
+ BigInteger.prototype.abs = bnAbs;
+ BigInteger.prototype.compareTo = bnCompareTo;
+ BigInteger.prototype.bitLength = bnBitLength;
+ BigInteger.prototype.mod = bnMod;
+ BigInteger.prototype.modPowInt = bnModPowInt;
+
+ // "constants"
+ BigInteger.ZERO = nbv(0);
+ BigInteger.ONE = nbv(1);
+
+ // Copyright (c) 2005-2009 Tom Wu
+ // All Rights Reserved.
+ // See "LICENSE" for details.
+
+ // Extended JavaScript BN functions, required for RSA private ops.
+
+ // Version 1.1: new BigInteger("0", 10) returns "proper" zero
+ // Version 1.2: square() API, isProbablePrime fix
+
+ // (public)
+ function bnClone() { var r = nbi(); this.copyTo(r); return r; }
+
+ // (public) return value as integer
+ function bnIntValue() {
+ if(this.s < 0) {
+ if(this.t == 1) return this[0]-this.DV;
+ else if(this.t == 0) return -1;
+ }
+ else if(this.t == 1) return this[0];
+ else if(this.t == 0) return 0;
+ // assumes 16 < DB < 32
+ return ((this[1]&((1<<(32-this.DB))-1))<<this.DB)|this[0];
+ }
+
+ // (public) return value as byte
+ function bnByteValue() { return (this.t==0)?this.s:(this[0]<<24)>>24; }
+
+ // (public) return value as short (assumes DB>=16)
+ function bnShortValue() { return (this.t==0)?this.s:(this[0]<<16)>>16; }
+
+ // (protected) return x s.t. r^x < DV
+ function bnpChunkSize(r) { return Math.floor(Math.LN2*this.DB/Math.log(r)); }
+
+ // (public) 0 if this == 0, 1 if this > 0
+ function bnSigNum() {
+ if(this.s < 0) return -1;
+ else if(this.t <= 0 || (this.t == 1 && this[0] <= 0)) return 0;
+ else return 1;
+ }
+
+ // (protected) convert to radix string
+ function bnpToRadix(b) {
+ if(b == null) b = 10;
+ if(this.signum() == 0 || b < 2 || b > 36) return "0";
+ var cs = this.chunkSize(b);
+ var a = Math.pow(b,cs);
+ var d = nbv(a), y = nbi(), z = nbi(), r = "";
+ this.divRemTo(d,y,z);
+ while(y.signum() > 0) {
+ r = (a+z.intValue()).toString(b).substr(1) + r;
+ y.divRemTo(d,y,z);
+ }
+ return z.intValue().toString(b) + r;
+ }
+
+ // (protected) convert from radix string
+ function bnpFromRadix(s,b) {
+ this.fromInt(0);
+ if(b == null) b = 10;
+ var cs = this.chunkSize(b);
+ var d = Math.pow(b,cs), mi = false, j = 0, w = 0;
+ for(var i = 0; i < s.length; ++i) {
+ var x = intAt(s,i);
+ if(x < 0) {
+ if(s.charAt(i) == "-" && this.signum() == 0) mi = true;
+ continue;
+ }
+ w = b*w+x;
+ if(++j >= cs) {
+ this.dMultiply(d);
+ this.dAddOffset(w,0);
+ j = 0;
+ w = 0;
+ }
+ }
+ if(j > 0) {
+ this.dMultiply(Math.pow(b,j));
+ this.dAddOffset(w,0);
+ }
+ if(mi) BigInteger.ZERO.subTo(this,this);
+ }
+
+ // (protected) alternate constructor
+ function bnpFromNumber(a,b,c) {
+ if("number" == typeof b) {
+ // new BigInteger(int,int,RNG)
+ if(a < 2) this.fromInt(1);
+ else {
+ this.fromNumber(a,c);
+ if(!this.testBit(a-1)) // force MSB set
+ this.bitwiseTo(BigInteger.ONE.shiftLeft(a-1),op_or,this);
+ if(this.isEven()) this.dAddOffset(1,0); // force odd
+ while(!this.isProbablePrime(b)) {
+ this.dAddOffset(2,0);
+ if(this.bitLength() > a) this.subTo(BigInteger.ONE.shiftLeft(a-1),this);
+ }
+ }
+ }
+ else {
+ // new BigInteger(int,RNG)
+ var x = new Array(), t = a&7;
+ x.length = (a>>3)+1;
+ b.nextBytes(x);
+ if(t > 0) x[0] &= ((1<<t)-1); else x[0] = 0;
+ this.fromString(x,256);
+ }
+ }
+
+ // (public) convert to bigendian byte array
+ function bnToByteArray() {
+ var i = this.t, r = new Array();
+ r[0] = this.s;
+ var p = this.DB-(i*this.DB)%8, d, k = 0;
+ if(i-- > 0) {
+ if(p < this.DB && (d = this[i]>>p) != (this.s&this.DM)>>p)
+ r[k++] = d|(this.s<<(this.DB-p));
+ while(i >= 0) {
+ if(p < 8) {
+ d = (this[i]&((1<<p)-1))<<(8-p);
+ d |= this[--i]>>(p+=this.DB-8);
+ }
+ else {
+ d = (this[i]>>(p-=8))&0xff;
+ if(p <= 0) { p += this.DB; --i; }
+ }
+ if((d&0x80) != 0) d |= -256;
+ if(k == 0 && (this.s&0x80) != (d&0x80)) ++k;
+ if(k > 0 || d != this.s) r[k++] = d;
+ }
+ }
+ return r;
+ }
+
+ function bnEquals(a) { return(this.compareTo(a)==0); }
+ function bnMin(a) { return(this.compareTo(a)<0)?this:a; }
+ function bnMax(a) { return(this.compareTo(a)>0)?this:a; }
+
+ // (protected) r = this op a (bitwise)
+ function bnpBitwiseTo(a,op,r) {
+ var i, f, m = Math.min(a.t,this.t);
+ for(i = 0; i < m; ++i) r[i] = op(this[i],a[i]);
+ if(a.t < this.t) {
+ f = a.s&this.DM;
+ for(i = m; i < this.t; ++i) r[i] = op(this[i],f);
+ r.t = this.t;
+ }
+ else {
+ f = this.s&this.DM;
+ for(i = m; i < a.t; ++i) r[i] = op(f,a[i]);
+ r.t = a.t;
+ }
+ r.s = op(this.s,a.s);
+ r.clamp();
+ }
+
+ // (public) this & a
+ function op_and(x,y) { return x&y; }
+ function bnAnd(a) { var r = nbi(); this.bitwiseTo(a,op_and,r); return r; }
+
+ // (public) this | a
+ function op_or(x,y) { return x|y; }
+ function bnOr(a) { var r = nbi(); this.bitwiseTo(a,op_or,r); return r; }
+
+ // (public) this ^ a
+ function op_xor(x,y) { return x^y; }
+ function bnXor(a) { var r = nbi(); this.bitwiseTo(a,op_xor,r); return r; }
+
+ // (public) this & ~a
+ function op_andnot(x,y) { return x&~y; }
+ function bnAndNot(a) { var r = nbi(); this.bitwiseTo(a,op_andnot,r); return r; }
+
+ // (public) ~this
+ function bnNot() {
+ var r = nbi();
+ for(var i = 0; i < this.t; ++i) r[i] = this.DM&~this[i];
+ r.t = this.t;
+ r.s = ~this.s;
+ return r;
+ }
+
+ // (public) this << n
+ function bnShiftLeft(n) {
+ var r = nbi();
+ if(n < 0) this.rShiftTo(-n,r); else this.lShiftTo(n,r);
+ return r;
+ }
+
+ // (public) this >> n
+ function bnShiftRight(n) {
+ var r = nbi();
+ if(n < 0) this.lShiftTo(-n,r); else this.rShiftTo(n,r);
+ return r;
+ }
+
+ // return index of lowest 1-bit in x, x < 2^31
+ function lbit(x) {
+ if(x == 0) return -1;
+ var r = 0;
+ if((x&0xffff) == 0) { x >>= 16; r += 16; }
+ if((x&0xff) == 0) { x >>= 8; r += 8; }
+ if((x&0xf) == 0) { x >>= 4; r += 4; }
+ if((x&3) == 0) { x >>= 2; r += 2; }
+ if((x&1) == 0) ++r;
+ return r;
+ }
+
+ // (public) returns index of lowest 1-bit (or -1 if none)
+ function bnGetLowestSetBit() {
+ for(var i = 0; i < this.t; ++i)
+ if(this[i] != 0) return i*this.DB+lbit(this[i]);
+ if(this.s < 0) return this.t*this.DB;
+ return -1;
+ }
+
+ // return number of 1 bits in x
+ function cbit(x) {
+ var r = 0;
+ while(x != 0) { x &= x-1; ++r; }
+ return r;
+ }
+
+ // (public) return number of set bits
+ function bnBitCount() {
+ var r = 0, x = this.s&this.DM;
+ for(var i = 0; i < this.t; ++i) r += cbit(this[i]^x);
+ return r;
+ }
+
+ // (public) true iff nth bit is set
+ function bnTestBit(n) {
+ var j = Math.floor(n/this.DB);
+ if(j >= this.t) return(this.s!=0);
+ return((this[j]&(1<<(n%this.DB)))!=0);
+ }
+
+ // (protected) this op (1<<n)
+ function bnpChangeBit(n,op) {
+ var r = BigInteger.ONE.shiftLeft(n);
+ this.bitwiseTo(r,op,r);
+ return r;
+ }
+
+ // (public) this | (1<<n)
+ function bnSetBit(n) { return this.changeBit(n,op_or); }
+
+ // (public) this & ~(1<<n)
+ function bnClearBit(n) { return this.changeBit(n,op_andnot); }
+
+ // (public) this ^ (1<<n)
+ function bnFlipBit(n) { return this.changeBit(n,op_xor); }
+
+ // (protected) r = this + a
+ function bnpAddTo(a,r) {
+ var i = 0, c = 0, m = Math.min(a.t,this.t);
+ while(i < m) {
+ c += this[i]+a[i];
+ r[i++] = c&this.DM;
+ c >>= this.DB;
+ }
+ if(a.t < this.t) {
+ c += a.s;
+ while(i < this.t) {
+ c += this[i];
+ r[i++] = c&this.DM;
+ c >>= this.DB;
+ }
+ c += this.s;
+ }
+ else {
+ c += this.s;
+ while(i < a.t) {
+ c += a[i];
+ r[i++] = c&this.DM;
+ c >>= this.DB;
+ }
+ c += a.s;
+ }
+ r.s = (c<0)?-1:0;
+ if(c > 0) r[i++] = c;
+ else if(c < -1) r[i++] = this.DV+c;
+ r.t = i;
+ r.clamp();
+ }
+
+ // (public) this + a
+ function bnAdd(a) { var r = nbi(); this.addTo(a,r); return r; }
+
+ // (public) this - a
+ function bnSubtract(a) { var r = nbi(); this.subTo(a,r); return r; }
+
+ // (public) this * a
+ function bnMultiply(a) { var r = nbi(); this.multiplyTo(a,r); return r; }
+
+ // (public) this^2
+ function bnSquare() { var r = nbi(); this.squareTo(r); return r; }
+
+ // (public) this / a
+ function bnDivide(a) { var r = nbi(); this.divRemTo(a,r,null); return r; }
+
+ // (public) this % a
+ function bnRemainder(a) { var r = nbi(); this.divRemTo(a,null,r); return r; }
+
+ // (public) [this/a,this%a]
+ function bnDivideAndRemainder(a) {
+ var q = nbi(), r = nbi();
+ this.divRemTo(a,q,r);
+ return new Array(q,r);
+ }
+
+ // (protected) this *= n, this >= 0, 1 < n < DV
+ function bnpDMultiply(n) {
+ this[this.t] = this.am(0,n-1,this,0,0,this.t);
+ ++this.t;
+ this.clamp();
+ }
+
+ // (protected) this += n << w words, this >= 0
+ function bnpDAddOffset(n,w) {
+ if(n == 0) return;
+ while(this.t <= w) this[this.t++] = 0;
+ this[w] += n;
+ while(this[w] >= this.DV) {
+ this[w] -= this.DV;
+ if(++w >= this.t) this[this.t++] = 0;
+ ++this[w];
+ }
+ }
+
+ // A "null" reducer
+ function NullExp() {}
+ function nNop(x) { return x; }
+ function nMulTo(x,y,r) { x.multiplyTo(y,r); }
+ function nSqrTo(x,r) { x.squareTo(r); }
+
+ NullExp.prototype.convert = nNop;
+ NullExp.prototype.revert = nNop;
+ NullExp.prototype.mulTo = nMulTo;
+ NullExp.prototype.sqrTo = nSqrTo;
+
+ // (public) this^e
+ function bnPow(e) { return this.exp(e,new NullExp()); }
+
+ // (protected) r = lower n words of "this * a", a.t <= n
+ // "this" should be the larger one if appropriate.
+ function bnpMultiplyLowerTo(a,n,r) {
+ var i = Math.min(this.t+a.t,n);
+ r.s = 0; // assumes a,this >= 0
+ r.t = i;
+ while(i > 0) r[--i] = 0;
+ var j;
+ for(j = r.t-this.t; i < j; ++i) r[i+this.t] = this.am(0,a[i],r,i,0,this.t);
+ for(j = Math.min(a.t,n); i < j; ++i) this.am(0,a[i],r,i,0,n-i);
+ r.clamp();
+ }
+
+ // (protected) r = "this * a" without lower n words, n > 0
+ // "this" should be the larger one if appropriate.
+ function bnpMultiplyUpperTo(a,n,r) {
+ --n;
+ var i = r.t = this.t+a.t-n;
+ r.s = 0; // assumes a,this >= 0
+ while(--i >= 0) r[i] = 0;
+ for(i = Math.max(n-this.t,0); i < a.t; ++i)
+ r[this.t+i-n] = this.am(n-i,a[i],r,0,0,this.t+i-n);
+ r.clamp();
+ r.drShiftTo(1,r);
+ }
+
+ // Barrett modular reduction
+ function Barrett(m) {
+ // setup Barrett
+ this.r2 = nbi();
+ this.q3 = nbi();
+ BigInteger.ONE.dlShiftTo(2*m.t,this.r2);
+ this.mu = this.r2.divide(m);
+ this.m = m;
+ }
+
+ function barrettConvert(x) {
+ if(x.s < 0 || x.t > 2*this.m.t) return x.mod(this.m);
+ else if(x.compareTo(this.m) < 0) return x;
+ else { var r = nbi(); x.copyTo(r); this.reduce(r); return r; }
+ }
+
+ function barrettRevert(x) { return x; }
+
+ // x = x mod m (HAC 14.42)
+ function barrettReduce(x) {
+ x.drShiftTo(this.m.t-1,this.r2);
+ if(x.t > this.m.t+1) { x.t = this.m.t+1; x.clamp(); }
+ this.mu.multiplyUpperTo(this.r2,this.m.t+1,this.q3);
+ this.m.multiplyLowerTo(this.q3,this.m.t+1,this.r2);
+ while(x.compareTo(this.r2) < 0) x.dAddOffset(1,this.m.t+1);
+ x.subTo(this.r2,x);
+ while(x.compareTo(this.m) >= 0) x.subTo(this.m,x);
+ }
+
+ // r = x^2 mod m; x != r
+ function barrettSqrTo(x,r) { x.squareTo(r); this.reduce(r); }
+
+ // r = x*y mod m; x,y != r
+ function barrettMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }
+
+ Barrett.prototype.convert = barrettConvert;
+ Barrett.prototype.revert = barrettRevert;
+ Barrett.prototype.reduce = barrettReduce;
+ Barrett.prototype.mulTo = barrettMulTo;
+ Barrett.prototype.sqrTo = barrettSqrTo;
+
+ // (public) this^e % m (HAC 14.85)
+ function bnModPow(e,m) {
+ var i = e.bitLength(), k, r = nbv(1), z;
+ if(i <= 0) return r;
+ else if(i < 18) k = 1;
+ else if(i < 48) k = 3;
+ else if(i < 144) k = 4;
+ else if(i < 768) k = 5;
+ else k = 6;
+ if(i < 8)
+ z = new Classic(m);
+ else if(m.isEven())
+ z = new Barrett(m);
+ else
+ z = new Montgomery(m);
+
+ // precomputation
+ var g = new Array(), n = 3, k1 = k-1, km = (1<<k)-1;
+ g[1] = z.convert(this);
+ if(k > 1) {
+ var g2 = nbi();
+ z.sqrTo(g[1],g2);
+ while(n <= km) {
+ g[n] = nbi();
+ z.mulTo(g2,g[n-2],g[n]);
+ n += 2;
+ }
+ }
+
+ var j = e.t-1, w, is1 = true, r2 = nbi(), t;
+ i = nbits(e[j])-1;
+ while(j >= 0) {
+ if(i >= k1) w = (e[j]>>(i-k1))&km;
+ else {
+ w = (e[j]&((1<<(i+1))-1))<<(k1-i);
+ if(j > 0) w |= e[j-1]>>(this.DB+i-k1);
+ }
+
+ n = k;
+ while((w&1) == 0) { w >>= 1; --n; }
+ if((i -= n) < 0) { i += this.DB; --j; }
+ if(is1) { // ret == 1, don't bother squaring or multiplying it
+ g[w].copyTo(r);
+ is1 = false;
+ }
+ else {
+ while(n > 1) { z.sqrTo(r,r2); z.sqrTo(r2,r); n -= 2; }
+ if(n > 0) z.sqrTo(r,r2); else { t = r; r = r2; r2 = t; }
+ z.mulTo(r2,g[w],r);
+ }
+
+ while(j >= 0 && (e[j]&(1<<i)) == 0) {
+ z.sqrTo(r,r2); t = r; r = r2; r2 = t;
+ if(--i < 0) { i = this.DB-1; --j; }
+ }
+ }
+ return z.revert(r);
+ }
+
+ // (public) gcd(this,a) (HAC 14.54)
+ function bnGCD(a) {
+ var x = (this.s<0)?this.negate():this.clone();
+ var y = (a.s<0)?a.negate():a.clone();
+ if(x.compareTo(y) < 0) { var t = x; x = y; y = t; }
+ var i = x.getLowestSetBit(), g = y.getLowestSetBit();
+ if(g < 0) return x;
+ if(i < g) g = i;
+ if(g > 0) {
+ x.rShiftTo(g,x);
+ y.rShiftTo(g,y);
+ }
+ while(x.signum() > 0) {
+ if((i = x.getLowestSetBit()) > 0) x.rShiftTo(i,x);
+ if((i = y.getLowestSetBit()) > 0) y.rShiftTo(i,y);
+ if(x.compareTo(y) >= 0) {
+ x.subTo(y,x);
+ x.rShiftTo(1,x);
+ }
+ else {
+ y.subTo(x,y);
+ y.rShiftTo(1,y);
+ }
+ }
+ if(g > 0) y.lShiftTo(g,y);
+ return y;
+ }
+
+ // (protected) this % n, n < 2^26
+ function bnpModInt(n) {
+ if(n <= 0) return 0;
+ var d = this.DV%n, r = (this.s<0)?n-1:0;
+ if(this.t > 0)
+ if(d == 0) r = this[0]%n;
+ else for(var i = this.t-1; i >= 0; --i) r = (d*r+this[i])%n;
+ return r;
+ }
+
+ // (public) 1/this % m (HAC 14.61)
+ function bnModInverse(m) {
+ var ac = m.isEven();
+ if((this.isEven() && ac) || m.signum() == 0) return BigInteger.ZERO;
+ var u = m.clone(), v = this.clone();
+ var a = nbv(1), b = nbv(0), c = nbv(0), d = nbv(1);
+ while(u.signum() != 0) {
+ while(u.isEven()) {
+ u.rShiftTo(1,u);
+ if(ac) {
+ if(!a.isEven() || !b.isEven()) { a.addTo(this,a); b.subTo(m,b); }
+ a.rShiftTo(1,a);
+ }
+ else if(!b.isEven()) b.subTo(m,b);
+ b.rShiftTo(1,b);
+ }
+ while(v.isEven()) {
+ v.rShiftTo(1,v);
+ if(ac) {
+ if(!c.isEven() || !d.isEven()) { c.addTo(this,c); d.subTo(m,d); }
+ c.rShiftTo(1,c);
+ }
+ else if(!d.isEven()) d.subTo(m,d);
+ d.rShiftTo(1,d);
+ }
+ if(u.compareTo(v) >= 0) {
+ u.subTo(v,u);
+ if(ac) a.subTo(c,a);
+ b.subTo(d,b);
+ }
+ else {
+ v.subTo(u,v);
+ if(ac) c.subTo(a,c);
+ d.subTo(b,d);
+ }
+ }
+ if(v.compareTo(BigInteger.ONE) != 0) return BigInteger.ZERO;
+ if(d.compareTo(m) >= 0) return d.subtract(m);
+ if(d.signum() < 0) d.addTo(m,d); else return d;
+ if(d.signum() < 0) return d.add(m); else return d;
+ }
+
+ var lowprimes = [2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97,101,103,107,109,113,127,131,137,139,149,151,157,163,167,173,179,181,191,193,197,199,211,223,227,229,233,239,241,251,257,263,269,271,277,281,283,293,307,311,313,317,331,337,347,349,353,359,367,373,379,383,389,397,401,409,419,421,431,433,439,443,449,457,461,463,467,479,487,491,499,503,509,521,523,541,547,557,563,569,571,577,587,593,599,601,607,613,617,619,631,641,643,647,653,659,661,673,677,683,691,701,709,719,727,733,739,743,751,757,761,769,773,787,797,809,811,821,823,827,829,839,853,857,859,863,877,881,883,887,907,911,919,929,937,941,947,953,967,971,977,983,991,997];
+ var lplim = (1<<26)/lowprimes[lowprimes.length-1];
+
+ // (public) test primality with certainty >= 1-.5^t
+ function bnIsProbablePrime(t) {
+ var i, x = this.abs();
+ if(x.t == 1 && x[0] <= lowprimes[lowprimes.length-1]) {
+ for(i = 0; i < lowprimes.length; ++i)
+ if(x[0] == lowprimes[i]) return true;
+ return false;
+ }
+ if(x.isEven()) return false;
+ i = 1;
+ while(i < lowprimes.length) {
+ var m = lowprimes[i], j = i+1;
+ while(j < lowprimes.length && m < lplim) m *= lowprimes[j++];
+ m = x.modInt(m);
+ while(i < j) if(m%lowprimes[i++] == 0) return false;
+ }
+ return x.millerRabin(t);
+ }
+
+ // (protected) true if probably prime (HAC 4.24, Miller-Rabin)
+ function bnpMillerRabin(t) {
+ var n1 = this.subtract(BigInteger.ONE);
+ var k = n1.getLowestSetBit();
+ if(k <= 0) return false;
+ var r = n1.shiftRight(k);
+ t = (t+1)>>1;
+ if(t > lowprimes.length) t = lowprimes.length;
+ var a = nbi();
+ for(var i = 0; i < t; ++i) {
+ //Pick bases at random, instead of starting at 2
+ a.fromInt(lowprimes[Math.floor(Math.random()*lowprimes.length)]);
+ var y = a.modPow(r,this);
+ if(y.compareTo(BigInteger.ONE) != 0 && y.compareTo(n1) != 0) {
+ var j = 1;
+ while(j++ < k && y.compareTo(n1) != 0) {
+ y = y.modPowInt(2,this);
+ if(y.compareTo(BigInteger.ONE) == 0) return false;
+ }
+ if(y.compareTo(n1) != 0) return false;
+ }
+ }
+ return true;
+ }
+
+ // protected
+ BigInteger.prototype.chunkSize = bnpChunkSize;
+ BigInteger.prototype.toRadix = bnpToRadix;
+ BigInteger.prototype.fromRadix = bnpFromRadix;
+ BigInteger.prototype.fromNumber = bnpFromNumber;
+ BigInteger.prototype.bitwiseTo = bnpBitwiseTo;
+ BigInteger.prototype.changeBit = bnpChangeBit;
+ BigInteger.prototype.addTo = bnpAddTo;
+ BigInteger.prototype.dMultiply = bnpDMultiply;
+ BigInteger.prototype.dAddOffset = bnpDAddOffset;
+ BigInteger.prototype.multiplyLowerTo = bnpMultiplyLowerTo;
+ BigInteger.prototype.multiplyUpperTo = bnpMultiplyUpperTo;
+ BigInteger.prototype.modInt = bnpModInt;
+ BigInteger.prototype.millerRabin = bnpMillerRabin;
+
+ // public
+ BigInteger.prototype.clone = bnClone;
+ BigInteger.prototype.intValue = bnIntValue;
+ BigInteger.prototype.byteValue = bnByteValue;
+ BigInteger.prototype.shortValue = bnShortValue;
+ BigInteger.prototype.signum = bnSigNum;
+ BigInteger.prototype.toByteArray = bnToByteArray;
+ BigInteger.prototype.equals = bnEquals;
+ BigInteger.prototype.min = bnMin;
+ BigInteger.prototype.max = bnMax;
+ BigInteger.prototype.and = bnAnd;
+ BigInteger.prototype.or = bnOr;
+ BigInteger.prototype.xor = bnXor;
+ BigInteger.prototype.andNot = bnAndNot;
+ BigInteger.prototype.not = bnNot;
+ BigInteger.prototype.shiftLeft = bnShiftLeft;
+ BigInteger.prototype.shiftRight = bnShiftRight;
+ BigInteger.prototype.getLowestSetBit = bnGetLowestSetBit;
+ BigInteger.prototype.bitCount = bnBitCount;
+ BigInteger.prototype.testBit = bnTestBit;
+ BigInteger.prototype.setBit = bnSetBit;
+ BigInteger.prototype.clearBit = bnClearBit;
+ BigInteger.prototype.flipBit = bnFlipBit;
+ BigInteger.prototype.add = bnAdd;
+ BigInteger.prototype.subtract = bnSubtract;
+ BigInteger.prototype.multiply = bnMultiply;
+ BigInteger.prototype.divide = bnDivide;
+ BigInteger.prototype.remainder = bnRemainder;
+ BigInteger.prototype.divideAndRemainder = bnDivideAndRemainder;
+ BigInteger.prototype.modPow = bnModPow;
+ BigInteger.prototype.modInverse = bnModInverse;
+ BigInteger.prototype.pow = bnPow;
+ BigInteger.prototype.gcd = bnGCD;
+ BigInteger.prototype.isProbablePrime = bnIsProbablePrime;
+
+ // JSBN-specific extension
+ BigInteger.prototype.square = bnSquare;
+
+ // Expose the Barrett function
+ BigInteger.prototype.Barrett = Barrett
+
+ // BigInteger interfaces not implemented in jsbn:
+
+ // BigInteger(int signum, byte[] magnitude)
+ // double doubleValue()
+ // float floatValue()
+ // int hashCode()
+ // long longValue()
+ // static BigInteger valueOf(long val)
+
+ // Random number generator - requires a PRNG backend, e.g. prng4.js
+
+ // For best results, put code like
+ // <body onClick='rng_seed_time();' onKeyPress='rng_seed_time();'>
+ // in your main HTML document.
+
+ var rng_state;
+ var rng_pool;
+ var rng_pptr;
+
+ // Mix in a 32-bit integer into the pool
+ function rng_seed_int(x) {
+ rng_pool[rng_pptr++] ^= x & 255;
+ rng_pool[rng_pptr++] ^= (x >> 8) & 255;
+ rng_pool[rng_pptr++] ^= (x >> 16) & 255;
+ rng_pool[rng_pptr++] ^= (x >> 24) & 255;
+ if(rng_pptr >= rng_psize) rng_pptr -= rng_psize;
+ }
+
+ // Mix in the current time (w/milliseconds) into the pool
+ function rng_seed_time() {
+ rng_seed_int(new Date().getTime());
+ }
+
+ // Initialize the pool with junk if needed.
+ if(rng_pool == null) {
+ rng_pool = new Array();
+ rng_pptr = 0;
+ var t;
+ if(typeof window !== "undefined" && window.crypto) {
+ if (window.crypto.getRandomValues) {
+ // Use webcrypto if available
+ var ua = new Uint8Array(32);
+ window.crypto.getRandomValues(ua);
+ for(t = 0; t < 32; ++t)
+ rng_pool[rng_pptr++] = ua[t];
+ }
+ else if(navigator.appName == "Netscape" && navigator.appVersion < "5") {
+ // Extract entropy (256 bits) from NS4 RNG if available
+ var z = window.crypto.random(32);
+ for(t = 0; t < z.length; ++t)
+ rng_pool[rng_pptr++] = z.charCodeAt(t) & 255;
+ }
+ }
+ while(rng_pptr < rng_psize) { // extract some randomness from Math.random()
+ t = Math.floor(65536 * Math.random());
+ rng_pool[rng_pptr++] = t >>> 8;
+ rng_pool[rng_pptr++] = t & 255;
+ }
+ rng_pptr = 0;
+ rng_seed_time();
+ //rng_seed_int(window.screenX);
+ //rng_seed_int(window.screenY);
+ }
+
+ function rng_get_byte() {
+ if(rng_state == null) {
+ rng_seed_time();
+ rng_state = prng_newstate();
+ rng_state.init(rng_pool);
+ for(rng_pptr = 0; rng_pptr < rng_pool.length; ++rng_pptr)
+ rng_pool[rng_pptr] = 0;
+ rng_pptr = 0;
+ //rng_pool = null;
+ }
+ // TODO: allow reseeding after first request
+ return rng_state.next();
+ }
+
+ function rng_get_bytes(ba) {
+ var i;
+ for(i = 0; i < ba.length; ++i) ba[i] = rng_get_byte();
+ }
+
+ function SecureRandom() {}
+
+ SecureRandom.prototype.nextBytes = rng_get_bytes;
+
+ // prng4.js - uses Arcfour as a PRNG
+
+ function Arcfour() {
+ this.i = 0;
+ this.j = 0;
+ this.S = new Array();
+ }
+
+ // Initialize arcfour context from key, an array of ints, each from [0..255]
+ function ARC4init(key) {
+ var i, j, t;
+ for(i = 0; i < 256; ++i)
+ this.S[i] = i;
+ j = 0;
+ for(i = 0; i < 256; ++i) {
+ j = (j + this.S[i] + key[i % key.length]) & 255;
+ t = this.S[i];
+ this.S[i] = this.S[j];
+ this.S[j] = t;
+ }
+ this.i = 0;
+ this.j = 0;
+ }
+
+ function ARC4next() {
+ var t;
+ this.i = (this.i + 1) & 255;
+ this.j = (this.j + this.S[this.i]) & 255;
+ t = this.S[this.i];
+ this.S[this.i] = this.S[this.j];
+ this.S[this.j] = t;
+ return this.S[(t + this.S[this.i]) & 255];
+ }
+
+ Arcfour.prototype.init = ARC4init;
+ Arcfour.prototype.next = ARC4next;
+
+ // Plug in your RNG constructor here
+ function prng_newstate() {
+ return new Arcfour();
+ }
+
+ // Pool size must be a multiple of 4 and greater than 32.
+ // An array of bytes the size of the pool will be passed to init()
+ var rng_psize = 256;
+
+ BigInteger.SecureRandom = SecureRandom;
+ BigInteger.BigInteger = BigInteger;
+ if (typeof exports !== 'undefined') {
+ exports = module.exports = BigInteger;
+ } else {
+ this.BigInteger = BigInteger;
+ this.SecureRandom = SecureRandom;
+ }
+
+}).call(this);
diff --git a/familyark/app/node_modules/jsbn/package.json b/familyark/app/node_modules/jsbn/package.json
new file mode 100644
index 0000000..beb3a7a
--- /dev/null
+++ b/familyark/app/node_modules/jsbn/package.json
@@ -0,0 +1,53 @@
+{
+ "_from": "jsbn@~0.1.0",
+ "_id": "jsbn@0.1.1",
+ "_inBundle": false,
+ "_integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=",
+ "_location": "/jsbn",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "jsbn@~0.1.0",
+ "name": "jsbn",
+ "escapedName": "jsbn",
+ "rawSpec": "~0.1.0",
+ "saveSpec": null,
+ "fetchSpec": "~0.1.0"
+ },
+ "_requiredBy": [
+ "/ecc-jsbn",
+ "/sshpk"
+ ],
+ "_resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz",
+ "_shasum": "a5e654c2e5a2deb5f201d96cefbca80c0ef2f513",
+ "_spec": "jsbn@~0.1.0",
+ "_where": "/home/g/Workspace/hatthieves/familyARK/familyark/app/node_modules/sshpk",
+ "author": {
+ "name": "Tom Wu"
+ },
+ "bugs": {
+ "url": "https://github.com/andyperlitch/jsbn/issues"
+ },
+ "bundleDependencies": false,
+ "deprecated": false,
+ "description": "The jsbn library is a fast, portable implementation of large-number math in pure JavaScript, enabling public-key crypto and other applications on desktop and mobile browsers.",
+ "homepage": "https://github.com/andyperlitch/jsbn#readme",
+ "keywords": [
+ "biginteger",
+ "bignumber",
+ "big",
+ "integer"
+ ],
+ "license": "MIT",
+ "main": "index.js",
+ "name": "jsbn",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/andyperlitch/jsbn.git"
+ },
+ "scripts": {
+ "test": "mocha test.js"
+ },
+ "version": "0.1.1"
+}
diff --git a/familyark/app/node_modules/json-schema-traverse/.eslintrc.yml b/familyark/app/node_modules/json-schema-traverse/.eslintrc.yml
new file mode 100644
index 0000000..ab1762d
--- /dev/null
+++ b/familyark/app/node_modules/json-schema-traverse/.eslintrc.yml
@@ -0,0 +1,27 @@
+extends: eslint:recommended
+env:
+ node: true
+ browser: true
+rules:
+ block-scoped-var: 2
+ complexity: [2, 13]
+ curly: [2, multi-or-nest, consistent]
+ dot-location: [2, property]
+ dot-notation: 2
+ indent: [2, 2, SwitchCase: 1]
+ linebreak-style: [2, unix]
+ new-cap: 2
+ no-console: [2, allow: [warn, error]]
+ no-else-return: 2
+ no-eq-null: 2
+ no-fallthrough: 2
+ no-invalid-this: 2
+ no-return-assign: 2
+ no-shadow: 1
+ no-trailing-spaces: 2
+ no-use-before-define: [2, nofunc]
+ quotes: [2, single, avoid-escape]
+ semi: [2, always]
+ strict: [2, global]
+ valid-jsdoc: [2, requireReturn: false]
+ no-control-regex: 0
diff --git a/familyark/app/node_modules/json-schema-traverse/.travis.yml b/familyark/app/node_modules/json-schema-traverse/.travis.yml
new file mode 100644
index 0000000..7ddce74
--- /dev/null
+++ b/familyark/app/node_modules/json-schema-traverse/.travis.yml
@@ -0,0 +1,8 @@
+language: node_js
+node_js:
+ - "4"
+ - "6"
+ - "7"
+ - "8"
+after_script:
+ - coveralls < coverage/lcov.info
diff --git a/familyark/app/node_modules/json-schema-traverse/LICENSE b/familyark/app/node_modules/json-schema-traverse/LICENSE
new file mode 100644
index 0000000..7f15435
--- /dev/null
+++ b/familyark/app/node_modules/json-schema-traverse/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2017 Evgeny Poberezkin
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/familyark/app/node_modules/json-schema-traverse/README.md b/familyark/app/node_modules/json-schema-traverse/README.md
new file mode 100644
index 0000000..d5ccaf4
--- /dev/null
+++ b/familyark/app/node_modules/json-schema-traverse/README.md
@@ -0,0 +1,83 @@
+# json-schema-traverse
+Traverse JSON Schema passing each schema object to callback
+
+[](https://travis-ci.org/epoberezkin/json-schema-traverse)
+[](https://www.npmjs.com/package/json-schema-traverse)
+[](https://coveralls.io/github/epoberezkin/json-schema-traverse?branch=master)
+
+
+## Install
+
+```
+npm install json-schema-traverse
+```
+
+
+## Usage
+
+```javascript
+const traverse = require('json-schema-traverse');
+const schema = {
+ properties: {
+ foo: {type: 'string'},
+ bar: {type: 'integer'}
+ }
+};
+
+traverse(schema, {cb});
+// cb is called 3 times with:
+// 1. root schema
+// 2. {type: 'string'}
+// 3. {type: 'integer'}
+
+// Or:
+
+traverse(schema, {cb: {pre, post}});
+// pre is called 3 times with:
+// 1. root schema
+// 2. {type: 'string'}
+// 3. {type: 'integer'}
+//
+// post is called 3 times with:
+// 1. {type: 'string'}
+// 2. {type: 'integer'}
+// 3. root schema
+
+```
+
+Callback function `cb` is called for each schema object (not including draft-06 boolean schemas), including the root schema, in pre-order traversal. Schema references ($ref) are not resolved, they are passed as is. Alternatively, you can pass a `{pre, post}` object as `cb`, and then `pre` will be called before traversing child elements, and `post` will be called after all child elements have been traversed.
+
+Callback is passed these parameters:
+
+- _schema_: the current schema object
+- _JSON pointer_: from the root schema to the current schema object
+- _root schema_: the schema passed to `traverse` object
+- _parent JSON pointer_: from the root schema to the parent schema object (see below)
+- _parent keyword_: the keyword inside which this schema appears (e.g. `properties`, `anyOf`, etc.)
+- _parent schema_: not necessarily parent object/array; in the example above the parent schema for `{type: 'string'}` is the root schema
+- _index/property_: index or property name in the array/object containing multiple schemas; in the example above for `{type: 'string'}` the property name is `'foo'`
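+
+As a quick illustration of the first two parameters, the snippet below (using the same `schema` as above) logs the JSON pointer of every schema object the callback visits:
+
+```javascript
+traverse(schema, {cb: function (sch, jsonPtr) {
+  console.log(JSON.stringify(jsonPtr), sch);
+}});
+// ""                 -> the root schema
+// "/properties/foo"  -> {type: 'string'}
+// "/properties/bar"  -> {type: 'integer'}
+```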
+
+
+## Traverse objects in all unknown keywords
+
+```javascript
+const traverse = require('json-schema-traverse');
+const schema = {
+ mySchema: {
+ minimum: 1,
+ maximum: 2
+ }
+};
+
+traverse(schema, {allKeys: true, cb});
+// cb is called 2 times with:
+// 1. root schema
+// 2. mySchema
+```
+
+Without option `allKeys: true` callback will be called only with root schema.
+
+
+## License
+
+[MIT](https://github.com/epoberezkin/json-schema-traverse/blob/master/LICENSE)
diff --git a/familyark/app/node_modules/json-schema-traverse/index.js b/familyark/app/node_modules/json-schema-traverse/index.js
new file mode 100644
index 0000000..d4a18df
--- /dev/null
+++ b/familyark/app/node_modules/json-schema-traverse/index.js
@@ -0,0 +1,89 @@
+'use strict';
+
+var traverse = module.exports = function (schema, opts, cb) {
+ // Legacy support for v0.3.1 and earlier.
+ if (typeof opts == 'function') {
+ cb = opts;
+ opts = {};
+ }
+
+ cb = opts.cb || cb;
+ var pre = (typeof cb == 'function') ? cb : cb.pre || function() {};
+ var post = cb.post || function() {};
+
+ _traverse(opts, pre, post, schema, '', schema);
+};
+
+
+traverse.keywords = {
+ additionalItems: true,
+ items: true,
+ contains: true,
+ additionalProperties: true,
+ propertyNames: true,
+ not: true
+};
+
+traverse.arrayKeywords = {
+ items: true,
+ allOf: true,
+ anyOf: true,
+ oneOf: true
+};
+
+traverse.propsKeywords = {
+ definitions: true,
+ properties: true,
+ patternProperties: true,
+ dependencies: true
+};
+
+traverse.skipKeywords = {
+ default: true,
+ enum: true,
+ const: true,
+ required: true,
+ maximum: true,
+ minimum: true,
+ exclusiveMaximum: true,
+ exclusiveMinimum: true,
+ multipleOf: true,
+ maxLength: true,
+ minLength: true,
+ pattern: true,
+ format: true,
+ maxItems: true,
+ minItems: true,
+ uniqueItems: true,
+ maxProperties: true,
+ minProperties: true
+};
+
+
+function _traverse(opts, pre, post, schema, jsonPtr, rootSchema, parentJsonPtr, parentKeyword, parentSchema, keyIndex) {
+ if (schema && typeof schema == 'object' && !Array.isArray(schema)) {
+ pre(schema, jsonPtr, rootSchema, parentJsonPtr, parentKeyword, parentSchema, keyIndex);
+ for (var key in schema) {
+ var sch = schema[key];
+ if (Array.isArray(sch)) {
+ if (key in traverse.arrayKeywords) {
+ for (var i=0; i<sch.length; i++)
+ _traverse(opts, pre, post, sch[i], jsonPtr + '/' + key + '/' + i, rootSchema, jsonPtr, key, schema, i);
+ }
+ } else if (key in traverse.propsKeywords) {
+ if (sch && typeof sch == 'object') {
+ for (var prop in sch)
+ _traverse(opts, pre, post, sch[prop], jsonPtr + '/' + key + '/' + escapeJsonPtr(prop), rootSchema, jsonPtr, key, schema, prop);
+ }
+ } else if (key in traverse.keywords || (opts.allKeys && !(key in traverse.skipKeywords))) {
+ _traverse(opts, pre, post, sch, jsonPtr + '/' + key, rootSchema, jsonPtr, key, schema);
+ }
+ }
+ post(schema, jsonPtr, rootSchema, parentJsonPtr, parentKeyword, parentSchema, keyIndex);
+ }
+}
+
+function escapeJsonPtr(str) {
+ return str.replace(/~/g, '~0').replace(/\//g, '~1');
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ A JSON Media Type for Describing the Structure and Meaning of JSON Documents
+
+
+ SitePen (USA)
+
+
+ 530 Lytton Avenue
+ Palo Alto, CA 94301
+ USA
+
+ +1 650 968 8787
+ kris@sitepen.com
+
+
+
+
+
+
+
+ Calgary, AB
+ Canada
+
+ gary.court@gmail.com
+
+
+
+
+ Internet Engineering Task Force
+ JSON
+ Schema
+ JavaScript
+ Object
+ Notation
+ Hyper Schema
+ Hypermedia
+
+
+
+ JSON (JavaScript Object Notation) Schema defines the media type "application/schema+json",
+ a JSON based format for defining
+ the structure of JSON data. JSON Schema provides a contract for what JSON
+ data is required for a given application and how to interact with it. JSON
+ Schema is intended to define validation, documentation, hyperlink
+ navigation, and interaction control of JSON data.
+
+
+
+
+
+
+
+ JSON (JavaScript Object Notation) Schema is a JSON media type for defining
+ the structure of JSON data. JSON Schema provides a contract for what JSON
+ data is required for a given application and how to interact with it. JSON
+ Schema is intended to define validation, documentation, hyperlink
+ navigation, and interaction control of JSON data.
+
+
+
+
+
+
+
+ The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT", "SHOULD",
+ "SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL" in this document are to be
+ interpreted as described in RFC 2119.
+
+
+
+
+
+
+
+ JSON Schema defines the media type "application/schema+json" for
+ describing the structure of other
+ JSON documents. JSON Schema is JSON-based and includes facilities
+ for describing the structure of JSON documents in terms of
+ allowable values, descriptions, and interpreting relations with other resources.
+
+
+ JSON Schema format is organized into several separate definitions. The first
+ definition is the core schema specification. This definition is primarily
+ concerned with describing a JSON structure and specifying valid elements
+ in the structure. The second definition is the Hyper Schema specification,
+ which is intended to define elements in a structure that can be interpreted as
+ hyperlinks.
+ Hyper Schema builds on JSON Schema to describe the hyperlink structure of
+ other JSON documents and elements of interaction. This allows user agents to be able to successfully navigate
+ JSON documents based on their schemas.
+
+
+ Cumulatively JSON Schema acts as a meta-document that can be used to define the required type and constraints on
+ property values, as well as define the meaning of the property values
+ for the purpose of describing a resource and determining hyperlinks
+ within the representation.
+
+
+ An example JSON Schema that describes products might look like:
+
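+ For instance, a schema matching the description in the following paragraph
+ (required "id", "name", and "price" properties, an optional "tags" array,
+ and link relations) could be:
+
+ {
+   "name": "Product",
+   "properties": {
+     "id": {"type": "number", "description": "Product identifier", "required": true},
+     "name": {"type": "string", "description": "Name of the product", "required": true},
+     "price": {"type": "number", "minimum": 0, "required": true},
+     "tags": {"type": "array", "items": {"type": "string"}}
+   },
+   "links": [
+     {"rel": "full", "href": "{id}"},
+     {"rel": "comments", "href": "comments/?id={id}"}
+   ]
+ }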
+
+
+
+ This schema defines the properties of the instance JSON documents,
+ the required properties (id, name, and price), as well as an optional
+ property (tags). This also defines the link relations of the instance
+ JSON documents.
+
+
+
+
+
+ For this specification, schema will be used to denote a JSON Schema
+ definition, and an instance refers to a JSON value that the schema
+ will be describing and validating.
+
+
+
+
+
+ The JSON Schema media type does not attempt to dictate the structure of JSON
+ representations that contain data, but rather provides a separate format
+ for flexibly communicating how a JSON representation should be
+ interpreted and validated, such that user agents can properly understand
+ acceptable structures and extrapolate hyperlink information
+ with the JSON document. It is acknowledged that JSON documents come
+ in a variety of structures, and JSON is unique in that the structure
+ of stored data structures often prescribes a non-ambiguous definite
+ JSON representation. Attempting to force a specific structure is generally
+ not viable, and therefore JSON Schema allows for a great flexibility
+ in the structure of the JSON data that it describes.
+
+
+ This specification is protocol agnostic.
+ The underlying protocol (such as HTTP) should sufficiently define the
+ semantics of the client-server interface, the retrieval of resource
+ representations linked to by JSON representations, and modification of
+ those resources. The goal of this
+ format is to sufficiently describe JSON structures such that one can
+ utilize existing information available in existing JSON
+ representations from a large variety of services that leverage a representational state transfer
+ architecture using existing protocols.
+
+
+
+
+
+
+ JSON Schema instances are correlated to their schema by the "describedby"
+ relation, where the schema is defined to be the target of the relation.
+ Instance representations may be of the "application/json" media type or
+ any other subtype. Consequently, dictating how an instance
+ representation should specify the relation to the schema is beyond the normative scope
+ of this document (since this document specifically defines the JSON
+ Schema media type, and no other), but it is recommended that instances
+ specify their schema so that user agents can interpret the instance
+ representation and messages may retain the self-descriptive
+ characteristic, avoiding the need for out-of-band information about
+ instance data. Two approaches are recommended for declaring the
+ relation to the schema that describes the meaning of a JSON instance's (or collection
+ of instances) structure. A MIME type parameter named
+ "profile" or a relation of "describedby" (which could be defined by a Link header) may be used:
+
+
+
+
+
+
+
+ or if the content is being transferred by a protocol (such as HTTP) that
+ provides headers, a Link header can be used:
+
+
+
+ Link: <...>; rel="describedby"
+
+
+
+ Instances MAY specify multiple schemas, to indicate all the schemas that
+ are applicable to the data, and the data SHOULD be valid by all the schemas.
+ The instance data MAY have multiple schemas
+ that it is defined by (the instance data SHOULD be valid for those schemas).
+ Or if the document is a collection of instances, the collection MAY contain
+ instances from different schemas. When collections contain heterogeneous
+ instances, the "pathStart" attribute MAY be specified in the
+ schema to disambiguate which schema should be applied for each item in the
+ collection. However, ultimately, the mechanism for referencing a schema is up to the
+ media type of the instance documents (if they choose to specify that schemas
+ can be referenced).
+
+
+
+
+ JSON Schemas can themselves be described using JSON Schemas.
+ A self-describing JSON Schema for the core JSON Schema can
+ be found at http://json-schema.org/schema for the latest version or
+ http://json-schema.org/draft-03/schema for the draft-03 version. The hyper schema
+ self-description can be found at http://json-schema.org/hyper-schema
+ or http://json-schema.org/draft-03/hyper-schema. All schemas
+ used within a protocol with media type definitions
+ SHOULD include a MIME parameter that refers to the self-descriptive
+ hyper schema or another schema that extends this hyper schema:
+
+
+
+
+
+
+
+
+
+
+
+
+ A JSON Schema is a JSON Object that defines various attributes
+ (including usage and valid values) of a JSON value. JSON
+ Schema has recursive capabilities; there are a number of elements
+ in the structure that allow for nested JSON Schemas.
+
+
+
+ An example JSON Schema definition could look like:
+
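+ A minimal illustration, describing a person object with a string "name"
+ and a bounded integer "age", could be:
+
+ {
+   "description": "A person",
+   "type": "object",
+   "properties": {
+     "name": {"type": "string"},
+     "age": {"type": "integer", "maximum": 125}
+   }
+ }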
+
+
+
+
+
+ A JSON Schema object may have any of the following properties, called schema
+ attributes (all attributes are optional):
+
+
+
+
+ This attribute defines what the primitive type or the schema of the instance MUST be in order to validate.
+ This attribute can take one of two forms:
+
+
+
+ A string indicating a primitive or simple type. The following are acceptable string values:
+
+
+ Value MUST be a string.
+ Value MUST be a number, floating point numbers are allowed.
+ Value MUST be an integer, no floating point numbers are allowed. This is a subset of the number type.
+ Value MUST be a boolean.
+ Value MUST be an object.
+ Value MUST be an array.
+ Value MUST be null. Note this is mainly for the purpose of being able to use union types to define nullability. If this type is not included in a union, null values are not allowed (the primitives listed above do not allow nulls on their own).
+ Value MAY be of any type including null.
+
+
+ If the property is not defined or is not in this list, then any type of value is acceptable.
+ Other type values MAY be used for custom purposes, but minimal validators of the specification
+ implementation can allow any instance value on unknown type values.
+
+
+
+ An array of two or more simple type definitions. Each item in the array MUST be a simple type definition or a schema.
+ The instance value is valid if it is of the same type as one of the simple type definitions, or valid by one of the schemas, in the array.
+
+
+
+
+
+ For example, a schema that defines if an instance can be a string or a number would be:
+
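+ In JSON form that is simply:
+
+ {"type": ["string", "number"]}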
+
+
+
+
+
+ This attribute is an object with property definitions that define the valid values of instance object property values. When the instance value is an object, the property values of the instance object MUST conform to the property definitions in this object. In this object, each property definition's value MUST be a schema, and the property's name MUST be the name of the instance property that it defines. The instance property value MUST be valid according to the schema from the property definition. Properties are considered unordered, the order of the instance properties MAY be in any order.
+
+
+
+ This attribute is an object that defines the schema for a set of property names of an object instance. The name of each property of this attribute's object is a regular expression pattern in the ECMA 262/Perl 5 format, while the value is a schema. If the pattern matches the name of a property on the instance object, the value of the instance's property MUST be valid against the pattern name's schema value.
+
+
+
+ This attribute defines a schema for all properties that are not explicitly defined in an object type definition. If specified, the value MUST be a schema or a boolean. If false is provided, no additional properties are allowed beyond the properties defined in the schema. The default value is an empty schema which allows any value for additional properties.
+
+
+
+ This attribute defines the allowed items in an instance array, and MUST be a schema or an array of schemas. The default value is an empty schema which allows any value for items in the instance array.
+ When this attribute value is a schema and the instance value is an array, then all the items in the array MUST be valid according to the schema.
+ When this attribute value is an array of schemas and the instance value is an array, each position in the instance array MUST conform to the schema in the corresponding position for this array. This is called tuple typing. When tuple typing is used, additional items are allowed, disallowed, or constrained by the "additionalItems" attribute using the same rules as "additionalProperties" for objects.
+
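+ As an illustration of tuple typing, an array whose first item must be a
+ string and whose second item must be a number, with no additional items
+ permitted, could be described by:
+
+ {
+   "type": "array",
+   "items": [{"type": "string"}, {"type": "number"}],
+   "additionalItems": false
+ }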
+
+
+ This provides a definition for additional items in an array instance when tuple definitions of the items are provided. This can be false to indicate additional items in the array are not allowed, or it can be a schema that defines the schema of the additional items.
+
+
+
+ This attribute indicates if the instance must have a value, and not be undefined. This is false by default, making the instance optional.
+
+
+
+ This attribute is an object that defines the requirements of a property on an instance object. If an object instance has a property with the same name as a property in this attribute's object, then the instance must be valid against the attribute's property value (hereafter referred to as the "dependency value").
+
+ The dependency value can take one of two forms:
+
+
+
+ If the dependency value is a string, then the instance object MUST have a property with the same name as the dependency value.
+ If the dependency value is an array of strings, then the instance object MUST have a property with the same name as each string in the dependency value's array.
+
+
+ If the dependency value is a schema, then the instance object MUST be valid against the schema.
+
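+ As an illustration (the property names are arbitrary), the following schema
+ states that any instance object carrying a "creditCard" property must also
+ carry a "billingAddress" property:
+
+ {
+   "type": "object",
+   "dependencies": {"creditCard": "billingAddress"}
+ }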
+
+
+
+
+
+ This attribute defines the minimum value of the instance property when the type of the instance value is a number.
+
+
+
+ This attribute defines the maximum value of the instance property when the type of the instance value is a number.
+
+
+
+ This attribute indicates if the value of the instance (if the instance is a number) can not equal the number defined by the "minimum" attribute. This is false by default, meaning the instance value can be greater than or equal to the minimum value.
+
+
+
+ This attribute indicates if the value of the instance (if the instance is a number) can not equal the number defined by the "maximum" attribute. This is false by default, meaning the instance value can be less than or equal to the maximum value.
+
+
+
+ This attribute defines the minimum number of values in an array when the array is the instance value.
+
+
+
+ This attribute defines the maximum number of values in an array when the array is the instance value.
+
+
+
+ This attribute indicates that all items in an array instance MUST be unique (contains no two identical values).
+
+ Two instances are considered equal if they are both of the same type and:
+
+
+ are null; or
+ are booleans/numbers/strings and have the same value; or
+ are arrays, contains the same number of items, and each item in the array is equal to the corresponding item in the other array; or
+ are objects, contains the same property names, and each property in the object is equal to the corresponding property in the other object.
+
+
+
+
+
+ When the instance value is a string, this provides a regular expression that a string instance MUST match in order to be valid. Regular expressions SHOULD follow the regular expression specification from ECMA 262/Perl 5
+
+
+
+ When the instance value is a string, this defines the minimum length of the string.
+
+
+
+ When the instance value is a string, this defines the maximum length of the string.
+
+
+
+ This provides an enumeration of all possible values that are valid for the instance property. This MUST be an array, and each item in the array represents a possible value for the instance value. If this attribute is defined, the instance value MUST be one of the values in the array in order for the schema to be valid. Comparison of enum values uses the same algorithm as defined in "uniqueItems".
+
+
+
+ This attribute defines the default value of the instance when the instance is undefined.
+
+
+
+ This attribute is a string that provides a short description of the instance property.
+
+
+
+ This attribute is a string that provides a full description of the purpose of the instance property.
+
+
+
+ This property defines the type of data, content type, or microformat to be expected in the instance property values. A format attribute MAY be one of the values listed below, and if so, SHOULD adhere to the semantics describing for the format. A format SHOULD only be used to give meaning to primitive types (string, integer, number, or boolean). Validators MAY (but are not required to) validate that the instance values conform to a format.
+
+
+ The following formats are predefined:
+
+
+ This SHOULD be a date in ISO 8601 format of YYYY-MM-DDThh:mm:ssZ in UTC time. This is the recommended form of date/timestamp.
+ This SHOULD be a date in the format of YYYY-MM-DD. It is recommended that you use the "date-time" format instead of "date" unless you need to transfer only the date part.
+ This SHOULD be a time in the format of hh:mm:ss. It is recommended that you use the "date-time" format instead of "time" unless you need to transfer only the time part.
+ This SHOULD be the difference, measured in milliseconds, between the specified time and midnight, 00:00 of January 1, 1970 UTC. The value SHOULD be a number (integer or float).
+ A regular expression, following the regular expression specification from ECMA 262/Perl 5.
+ This is a CSS color (like "#FF0000" or "red"), based on CSS 2.1.
+ This is a CSS style definition (like "color: red; background-color:#FFF"), based on CSS 2.1.
+ This SHOULD be a phone number (format MAY follow E.123).
+ This value SHOULD be a URI.
+ This SHOULD be an email address.
+ This SHOULD be an ip version 4 address.
+ This SHOULD be an ip version 6 address.
+ This SHOULD be a host-name.
+
+
+
+ Additional custom formats MAY be created. These custom formats MAY be expressed as a URI, and this URI MAY reference a schema of that format.
+
+
+
+ This attribute defines what value the number instance must be divisible by with no remainder (the result of the division must be an integer.) The value of this attribute SHOULD NOT be 0.
+
+
+
+ This attribute takes the same values as the "type" attribute, however if the instance matches the type or if this value is an array and the instance matches any type or schema in the array, then this instance is not valid.
+
+
+
+ The value of this property MUST be another schema which will provide a base schema which the current schema will inherit from. The inheritance rules are such that any instance that is valid according to the current schema MUST be valid according to the referenced schema. This MAY also be an array, in which case, the instance MUST be valid for all the schemas in the array. A schema that extends another schema MAY define additional attributes, constrain existing attributes, or add other constraints.
+
+ Conceptually, the behavior of extends can be seen as validating an
+ instance against all constraints in the extending schema as well as
+ the extended schema(s). More optimized implementations that merge
+ schemas are possible, but are not required. Some examples of using "extends":
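+
+ As a hedged sketch only (the schema names and properties below are invented for illustration and are not the specification's original example figures), an extending schema could look like:
+
+ var person = {
+   "description": "A person",
+   "type": "object",
+   "properties": { "name": { "type": "string", "required": true } }
+ };
+
+ var employee = {
+   "description": "A person who is also an employee",
+   "extends": person,
+   "properties": { "employeeId": { "type": "integer", "required": true } }
+ };
+
+ // An instance valid against "employee" must satisfy both schemas: it must
+ // carry a string "name" (from person) as well as an integer "employeeId".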
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ This attribute defines the current URI of this schema (this attribute is
+ effectively a "self" link). This URI MAY be relative or absolute. If
+ the URI is relative it is resolved against the current URI of the parent
+ schema it is contained in. If this schema is not contained in any
+ parent schema, the current URI of the parent schema is held to be the
+ URI under which this schema was addressed. If id is missing, the current URI of a schema is
+ defined to be that of the parent schema. The current URI of the schema
+ is also used to construct relative references such as for $ref.
+
+
+
+
+
+ This attribute defines a URI of a schema that contains the full representation of this schema.
+ When a validator encounters this attribute, it SHOULD replace the current schema with the schema referenced by the value's URI (if known and available) and re-validate the instance.
+ This URI MAY be relative or absolute, and relative URIs SHOULD be resolved against the URI of the current schema.
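+
+ A small sketch of "$ref" resolution (the URIs below are hypothetical, chosen only for illustration):
+
+ var addressSchema = {
+   "id": "http://example.com/schemas/address",
+   "type": "object",
+   "properties": { "city": { "type": "string" } }
+ };
+
+ var personSchema = {
+   "id": "http://example.com/schemas/person",
+   "properties": {
+     // A validator that knows the referenced schema SHOULD validate
+     // "shipping" against the address schema instead of this stub.
+     "shipping": { "$ref": "address" }
+   }
+ };
+
+ // "address" is a relative URI; resolved against the current schema's URI
+ // (http://example.com/schemas/person) it yields http://example.com/schemas/address.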
+
+
+
+
+
+ This attribute defines a URI of a JSON Schema that is the schema of the current schema.
+ When this attribute is defined, a validator SHOULD use the schema referenced by the value's URI (if known and available) when resolving Hyper Schema links.
+
+
+
+ A validator MAY use this attribute's value to determine which version of JSON Schema the current schema is written in, and provide the appropriate validation features and behavior.
+ Therefore, it is RECOMMENDED that all schema authors include this attribute in their schemas to prevent conflicts with future JSON Schema specification changes.
+
+
+
+
+
+
+ The following attributes are specified in addition to those
+ attributes already provided by the core schema, with the specific
+ purpose of informing user agents of relations between resources based
+ on JSON data. Just as with JSON
+ schema attributes, all the attributes in hyper schemas are optional.
+ Therefore, an empty object is a valid (non-informative) schema, and
+ essentially describes plain JSON (no constraints on the structures).
+ Addition of attributes provides additive information for user agents.
+
+
+
+
+ The value of the links property MUST be an array, where each item
+ in the array is a link description object which describes the link
+ relations of the instances.
+
+
+
+
+ A link description object is used to describe link relations. In
+ the context of a schema, it defines the link relations of the
+ instances of the schema, and can be parameterized by the instance
+ values. The link description format can be used on its own in
+ regular (non-schema) documents, and use of this format can
+ be declared by referencing the normative link description
+ schema as the schema for the data structure that uses the
+ links. The URI of the normative link description schema is:
+ http://json-schema.org/links (latest version) or
+ http://json-schema.org/draft-03/links (draft-03 version).
+
+
+
+
+ The value of the "href" link description property
+ indicates the target URI of the related resource. The value
+ of the instance property SHOULD be resolved as a URI-Reference per RFC 3986
+ and MAY be a relative URI. The base URI to be used for relative resolution
+ SHOULD be the URI used to retrieve the instance object (not the schema)
+ when used within a schema. Also, when links are used within a schema, the URI
+ SHOULD be parametrized by the property values of the instance
+ object, if property values exist for the corresponding variables
+ in the template (otherwise they MAY be provided from alternate sources, like user input).
+
+
+
+ Instance property values SHOULD be substituted into the URIs where
+ matching braces ('{', '}') are found surrounding zero or more characters,
+ creating an expanded URI. Instance property value substitutions are resolved
+ by using the text between the braces to denote the property name
+ from the instance to get the value to substitute.
+
+
+ For example, if an href value is defined:
+
+
+
+ Then it would be resolved by replacing the template variable with the value of the "id" property from the instance object.
+
+
+
+ If the value of the "id" property was "45", the expanded URI would be:
+
+
+
+
+
+ If matching braces are found with the string "@" (no quotes) between the braces, then the
+ actual instance value SHOULD be used to replace the braces, rather than a property value.
+ This should only be used in situations where the instance is a scalar (string,
+ boolean, or number), and not for objects or arrays.
+
+
+
+
+
+ The value of the "rel" property indicates the name of the
+ relation to the target resource. The relation to the target SHOULD be interpreted as specifically from the instance object that the schema (or sub-schema) applies to, not just the top level resource that contains the object within its hierarchy. If a resource JSON representation contains a sub object with a property interpreted as a link, that sub-object holds the relation with the target. A relation to target from the top level resource MUST be indicated with the schema describing the top level JSON representation.
+
+
+
+ Relationship definitions SHOULD NOT be media type dependent, and users are encouraged to utilize existing accepted relation definitions, including those in existing relation registries (see RFC 4287). However, we define these relations here for clarity of normative interpretation within the context of JSON hyper schema defined relations:
+
+
+
+ If the relation value is "self", when this property is encountered in
+ the instance object, the object represents a resource and the instance object is
+ treated as a full representation of the target resource identified by
+ the specified URI.
+
+
+
+ "full": This indicates that the target of the link is the full representation for the instance object. The object that contains this link may not itself be the full representation.
+
+
+
+ "describedby": This indicates the target of the link is the schema for the instance object. This MAY be used to specifically denote the schemas of objects within a JSON object hierarchy, facilitating polymorphic type data structures.
+
+
+
+ "root": This relation indicates that the target of the link
+ SHOULD be treated as the root or the body of the representation for the
+ purposes of user agent interaction or fragment resolution. All other
+ properties of the instance objects can be regarded as meta-data
+ descriptions for the data.
+
+
+
+
+
+ The following relations are applicable for schemas (the schema as the "from" resource in the relation):
+
+
+ "instances": This indicates the target resource that represents a collection of instances of a schema.
+ "create": This indicates a target to use for creating new instances of a schema. This link definition SHOULD be a submission link with a non-safe method (like POST).
+
+
+
+
+
+ For example, if a schema is defined:
+
+
+
+
+
+
+ And if the JSON representation of a collection of instance resources was retrieved:
+
+
+
+
+
+ This would indicate that for the first item in the collection, its own
+ (self) URI would resolve to "/Resource/thing" and the first item's "up"
+ relation SHOULD be resolved to the resource at "/Resource/parent".
+ The "children" collection would be located at "/Resource/?upId=thing".
+
+
+
+
+ This property value is a schema that defines the expected structure of the JSON representation of the target of the link.
+
+
+
+
+ The following properties also apply to link definition objects, and
+ provide functionality analogous to HTML forms, in providing a
+ means for submitting extra (often user supplied) information to send to a server.
+
+
+
+
+ This attribute defines which method can be used to access the target resource.
+ In an HTTP environment, this would be "GET" or "POST" (other HTTP methods
+ such as "PUT" and "DELETE" have semantics that are clearly implied by
+ accessed resources, and do not need to be defined here).
+ This defaults to "GET".
+
+
+
+
+
+ If present, this property indicates a query media type format that the server
+ supports for querying or posting to the collection of instances at the target
+ resource. The query can be
+ suffixed to the target URI to query the collection with
+ property-based constraints on the resources that SHOULD be returned from
+ the server or used to post data to the resource (depending on the method).
+
+
+ For example, with the following schema:
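+
+ A sketch of such a schema (an assumption for illustration; the original figure is not reproduced here):
+
+ var productCollection = {
+   "links": [ {
+     "rel": "instances",
+     "href": "/Product/",
+     "method": "GET",
+     "enctype": "application/x-www-form-urlencoded",
+     "schema": { "properties": { "name": { "type": "string" } } }
+   } ]
+ };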
+
+
+
+ This indicates that the client can query the server for instances that have a specific name.
+
+
+
+ For example:
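+
+ Under the sketch above, a query for products with a specific name might be encoded as (hypothetical values):
+
+ GET /Product/?name=Pen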
+
+
+
+
+
+ If no enctype or method is specified, only the single URI specified by
+ the href property is defined. If the method is POST, "application/json" is
+ the default media type.
+
+
+
+
+
+ This attribute contains a schema which defines the acceptable structure of the submitted
+ request (for a GET request, this schema would define the properties for the query string
+ and for a POST request, this would define the body).
+
+
+
+
+
+
+
+
+ This property indicates the fragment resolution protocol to use for
+ resolving fragment identifiers in URIs within the instance
+ representations. This applies to the instance object URIs and all
+ children of the instance object's URIs. The default fragment resolution
+ protocol is "slash-delimited", which is defined below. Other fragment
+ resolution protocols MAY be used, but are not defined in this document.
+
+
+
+ The fragment identifier is based on RFC 2396, Sec 5, and defines the
+ mechanism for resolving references to entities within a document.
+
+
+
+
+ With the slash-delimited fragment resolution protocol, the fragment
+ identifier is interpreted as a series of property reference tokens that start with and
+ are delimited by the "/" character (\x2F). Each property reference token
+ is a series of unreserved or escaped URI characters. Each property
+ reference token SHOULD be interpreted, starting from the beginning of
+ the fragment identifier, as a path reference in the target JSON
+ structure. The final target value of the fragment can be determined by
+ starting with the root of the JSON structure from the representation of
+ the resource identified by the pre-fragment URI. If the target is a JSON
+ object, then the new target is the value of the property with the name
+ identified by the next property reference token in the fragment. If the
+ target is a JSON array, then the target is determined by finding the
+ item in the array with the index defined by the next property
+ reference token (which MUST be a number). The target is successively
+ updated for each property reference token, until the entire fragment has
+ been traversed.
+
+
+
+ Property names SHOULD be URI-encoded. In particular, any "/" in a
+ property name MUST be encoded to avoid being interpreted as a property
+ delimiter.
+
+
+
+
+ For example, for the following JSON representation:
+
+
+
+
+
+
+ The following fragment identifiers would be resolved:
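+
+ A sketch (the JSON value and fragments below are assumptions chosen to illustrate the rules above, not the original figures):
+
+ var doc = { "foo": { "bar": [ "baz" ] }, "a/b": 1 };
+
+ // "#/foo"        resolves to the object { "bar": [ "baz" ] }
+ // "#/foo/bar"    resolves to the array [ "baz" ]
+ // "#/foo/bar/0"  resolves to "baz" (the index is the reference token)
+ // "#/a%2Fb"      resolves to 1 ("/" inside a property name is URI-encoded)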
+
+
+
+
+
+
+
+
+
+ The dot-delimited fragment resolution protocol is the same as the
+ slash-delimited fragment resolution protocol except that the "." character
+ (\x2E) is used as the delimiter between property names (instead of "/") and
+ the path does not need to start with a ".". For example, #.foo and #foo are both valid fragment
+ identifiers for referencing the value of the foo property.
+
+
+
+
+
+ This attribute indicates that the instance property SHOULD NOT be changed. Attempts by a user agent to modify the value of this property are expected to be rejected by a server.
+
+
+
+ If the instance property value is a string, this attribute defines that the string SHOULD be interpreted as binary data and decoded using the encoding named by this schema property. RFC 2045, Sec 6.1 lists the possible values for this property.
+
+
+
+
+ This attribute is a URI that defines what the instance's URI MUST start with in order to validate.
+ The value of the "pathStart" attribute MUST be resolved as per RFC 3986, Sec 5,
+ and is relative to the instance's URI.
+
+
+
+ When multiple schemas have been referenced for an instance, the user agent
+ can determine if this schema is applicable for a particular instance by
+ determining if the URI of the instance begins with the value of the "pathStart"
+ attribute. If the URI of the instance does not start with this URI,
+ or if another schema specifies a starting URI that is longer and also matches the
+ instance, this schema SHOULD NOT be applied to the instance. Any schema
+ that does not have a pathStart attribute SHOULD be considered applicable
+ to all the instances for which it is referenced.
+
+
+
+
+ This attribute defines the media type of the instance representations that this schema is defining.
+
+
+
+
+
+ This specification is a sub-type of the JSON format, and
+ consequently the security considerations are generally the same as RFC 4627.
+ However, an additional issue is that when link relation of "self"
+ is used to denote a full representation of an object, the user agent
+ SHOULD NOT consider the representation to be the authoritative representation
+ of the resource denoted by the target URI if the target URI is not
+ equivalent to or a sub-path of the URI used to request the resource
+ representation which contains the target URI with the "self" link.
+
+
+ For example, if a hyper schema was defined:
+
+
+
+
+
+
+ And a resource was requested from somesite.com:
+
+
+
+
+
+
+ With a response of:
+
+
+
+
+
+
+
+
+ The proposed MIME media type for JSON Schema is "application/schema+json".
+ Type name: application
+ Subtype name: schema+json
+ Required parameters: profile
+
+ The value of the profile parameter SHOULD be a URI (relative or absolute) that
+ refers to the schema used to define the structure of this structure (the
+ meta-schema). Normally the value would be http://json-schema.org/draft-03/hyper-schema,
+ but it is allowable to use other schemas that extend the hyper schema's meta-
+ schema.
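+
+ For instance, a response carrying a schema could be labelled as follows (a sketch; any other meta-schema that extends the hyper schema would be equally acceptable):
+
+ Content-Type: application/schema+json; profile=http://json-schema.org/draft-03/hyper-schema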
+
+ Optional parameters: pretty
+ The value of the pretty parameter MAY be true or false to indicate if additional whitespace has been included to make the JSON representation easier to read.
+
+
+
+ This registry is maintained by IANA per RFC 4287 and this specification adds
+ four values: "full", "create", "instances", "root". New
+ assignments are subject to IESG Approval, as outlined in RFC 5226.
+ Requests should be made by email to IANA, which will then forward the
+ request to the IESG, requesting approval.
+
+
+
+
+
+
+
+
+ &rfc2045;
+ &rfc2119;
+ &rfc2396;
+ &rfc3339;
+ &rfc3986;
+ &rfc4287;
+
+
+ &rfc2616;
+ &rfc4627;
+ &rfc5226;
+ &iddiscovery;
+ &uritemplate;
+ &linkheader;
+ &html401;
+ &css21;
+
+
+
+
+
+
+
+ Added example and verbiage to "extends" attribute.
+ Defined slash-delimited to use a leading slash.
+ Made "root" a relation instead of an attribute.
+ Removed address values, and MIME media type from format to reduce confusion (mediaType already exists, so it can be used for MIME types).
+ Added more explanation of nullability.
+ Removed "alternate" attribute.
+ Upper cased many normative usages of must, may, and should.
+ Replaced the link submission "properties" attribute with the "schema" attribute.
+ Replaced "optional" attribute with "required" attribute.
+ Replaced "maximumCanEqual" attribute with "exclusiveMaximum" attribute.
+ Replaced "minimumCanEqual" attribute with "exclusiveMinimum" attribute.
+ Replaced "requires" attribute with "dependencies" attribute.
+ Moved "contentEncoding" attribute to hyper schema.
+ Added "additionalItems" attribute.
+ Added "id" attribute.
+ Switched self-referencing variable substitution from "-this" to "@" to align with reserved characters in URI template.
+ Added "patternProperties" attribute.
+ Schema URIs are now namespace versioned.
+ Added "$ref" and "$schema" attributes.
+
+
+
+
+
+ Replaced "maxDecimal" attribute with "divisibleBy" attribute.
+ Added slash-delimited fragment resolution protocol and made it the default.
+ Added language about using links outside of schemas by referencing its normative URI.
+ Added "uniqueItems" attribute.
+ Added "targetSchema" attribute to link description object.
+
+
+
+
+
+ Fixed category and updates from template.
+
+
+
+
+
+ Initial draft.
+
+
+
+
+
+
+
+
+
+ Should we give a preference to MIME headers over Link headers (or only use one)?
+ Should "root" be a MIME parameter?
+ Should "format" be renamed to "mediaType" or "contentType" to reflect the usage MIME media types that are allowed?
+ How should dates be handled?
+
+
+
+
+
diff --git a/familyark/app/node_modules/json-schema/draft-zyp-json-schema-04.xml b/familyark/app/node_modules/json-schema/draft-zyp-json-schema-04.xml
new file mode 100644
index 0000000..8ede6bf
--- /dev/null
+++ b/familyark/app/node_modules/json-schema/draft-zyp-json-schema-04.xml
@@ -0,0 +1,1072 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+]>
+
+
+
+
+
+
+
+
+ A JSON Media Type for Describing the Structure and Meaning of JSON Documents
+
+
+ SitePen (USA)
+
+
+ 530 Lytton Avenue
+ Palo Alto, CA 94301
+ USA
+
+ +1 650 968 8787
+ kris@sitepen.com
+
+
+
+
+
+
+
+ Calgary, AB
+ Canada
+
+ gary.court@gmail.com
+
+
+
+
+ Internet Engineering Task Force
+ JSON
+ Schema
+ JavaScript
+ Object
+ Notation
+ Hyper Schema
+ Hypermedia
+
+
+
+ JSON (JavaScript Object Notation) Schema defines the media type "application/schema+json",
+ a JSON based format for defining the structure of JSON data. JSON Schema provides a contract for what JSON
+ data is required for a given application and how to interact with it. JSON
+ Schema is intended to define validation, documentation, hyperlink
+ navigation, and interaction control of JSON data.
+
+
+
+
+
+
+
+ JSON (JavaScript Object Notation) Schema is a JSON media type for defining
+ the structure of JSON data. JSON Schema provides a contract for what JSON
+ data is required for a given application and how to interact with it. JSON
+ Schema is intended to define validation, documentation, hyperlink
+ navigation, and interaction control of JSON data.
+
+
+
+
+
+
+
+ The key words "MUST", "MUST NOT", "REQUIRED", "SHALL", "SHALL NOT", "SHOULD",
+ "SHOULD NOT", "RECOMMENDED", "MAY", and "OPTIONAL" in this document are to be
+ interpreted as described in RFC 2119.
+
+
+
+ The terms "JSON", "JSON text", "JSON value", "member", "element", "object",
+ "array", "number", "string", "boolean", "true", "false", and "null" in this
+ document are to be interpreted as defined in RFC 4627.
+
+
+
+ This specification also uses the following defined terms:
+
+
+ "schema": A JSON Schema object.
+ "instance": Equivalent to "JSON value" as defined in RFC 4627.
+ "property": Equivalent to "member" as defined in RFC 4627.
+ "item": Equivalent to "element" as defined in RFC 4627.
+ "attribute": A property of a JSON Schema object.
+
+
+
+
+
+
+ JSON Schema defines the media type "application/schema+json" for
+ describing the structure of JSON text. JSON Schemas are themselves written in JSON and include facilities
+ for describing the structure of JSON in terms of
+ allowable values, descriptions, and interpreting relations with other resources.
+
+
+ This document is organized into several separate definitions. The first
+ definition is the core schema specification. This definition is primarily
+ concerned with describing a JSON structure and specifying valid elements
+ in the structure. The second definition is the Hyper Schema specification
+ which is intended to define elements in a structure that can be interpreted as
+ hyperlinks.
+ Hyper Schema builds on JSON Schema to describe the hyperlink structure of
+ JSON values. This allows user agents to be able to successfully navigate
+ documents containing JSON based on their schemas.
+
+
+ Cumulatively JSON Schema acts as meta-JSON that can be used to define the
+ required type and constraints on JSON values, as well as define the meaning
+ of the JSON values for the purpose of describing a resource and determining
+ hyperlinks within the representation.
+
+
+ An example JSON Schema that describes products might look like:
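+
+ A sketch consistent with the description that follows (the original figure is not reproduced here, so the exact attribute values are assumptions):
+
+ var productSchema = {
+   "title": "Product",
+   "type": "object",
+   "properties": {
+     "id": { "type": "number" },
+     "name": { "type": "string" },
+     "price": { "type": "number" },
+     "tags": { "type": "array", "items": { "type": "string" } }
+   },
+   "required": ["id", "name", "price"],
+   "links": [ { "rel": "full", "href": "{id}" } ]
+ };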
+
+
+
+
+ This schema defines the properties of the instance,
+ the required properties (id, name, and price), as well as an optional
+ property (tags). This also defines the link relations of the instance.
+
+
+
+
+
+ The JSON Schema media type does not attempt to dictate the structure of JSON
+ values that contain data, but rather provides a separate format
+ for flexibly communicating how a JSON value should be
+ interpreted and validated, such that user agents can properly understand
+ acceptable structures and extrapolate hyperlink information
+ from the JSON. It is acknowledged that JSON values come
+ in a variety of structures, and JSON is unique in that the structure
+ of stored data structures often prescribes a non-ambiguous definite
+ JSON representation. Attempting to force a specific structure is generally
+ not viable, and therefore JSON Schema allows for a great flexibility
+ in the structure of the JSON data that it describes.
+
+
+ This specification is protocol agnostic.
+ The underlying protocol (such as HTTP) should sufficiently define the
+ semantics of the client-server interface, the retrieval of resource
+ representations linked to by JSON representations, and modification of
+ those resources. The goal of this
+ format is to sufficiently describe JSON structures such that one can
+ utilize existing information available in existing JSON
+ representations from a large variety of services that leverage a representational state transfer
+ architecture using existing protocols.
+
+
+
+
+
+
+ JSON values are correlated to their schema by the "describedby"
+ relation, where the schema is the target of the relation.
+ JSON values MUST be of the "application/json" media type or
+ any other subtype. Consequently, dictating how a JSON value should
+ specify the relation to the schema is beyond the normative scope
+ of this document since this document specifically defines the JSON
+ Schema media type, and no other. It is RECOMMENDED that JSON values
+ specify their schema so that user agents can interpret the instance
+ and retain the self-descriptive characteristics. This avoids the need for out-of-band information about
+ instance data. Two approaches are recommended for declaring the
+ relation to the schema that describes the meaning of a JSON instance's (or collection
+ of instances) structure. A MIME type parameter named
+ "profile" or a relation of "describedby" (which could be specified by a Link header) may be used:
+
+
+
+
+
+
+
+ or if the content is being transferred by a protocol (such as HTTP) that
+ provides headers, a Link header can be used:
+
+
+
+ Link: <...>; rel="describedby"
+
+
+
+ Instances MAY specify multiple schemas, to indicate all the schemas that
+ are applicable to the data, and the data SHOULD be valid for all of those schemas.
+ If the document is a collection of instances, the collection MAY contain
+ instances from different schemas. The mechanism for referencing a schema is
+ determined by the media type of the instance (if it provides a method for
+ referencing schemas).
+
+
+
+
+ JSON Schemas can themselves be described using JSON Schemas.
+ A self-describing JSON Schema for the core JSON Schema can
+ be found at http://json-schema.org/schema for the latest version or
+ http://json-schema.org/draft-04/schema for the draft-04 version. The hyper schema
+ self-description can be found at http://json-schema.org/hyper-schema
+ or http://json-schema.org/draft-04/hyper-schema. All schemas
+ used within a protocol with a media type specified SHOULD include a MIME parameter that refers to the self-descriptive
+ hyper schema or another schema that extends this hyper schema:
+
+
+
+
+
+
+
+
+
+
+
+
+ A JSON Schema is a JSON object that defines various attributes
+ (including usage and valid values) of a JSON value. JSON
+ Schema has recursive capabilities; there are a number of elements
+ in the structure that allow for nested JSON Schemas.
+
+
+
+ An example JSON Schema could look like:
+
+
+
+
+
+
+ A JSON Schema object MAY have any of the following optional properties:
+
+
+
+
+
+
+
+ This attribute defines what the primitive type or the schema of the instance MUST be in order to validate.
+ This attribute can take one of two forms:
+
+
+
+ A string indicating a primitive or simple type. The string MUST be one of the following values:
+
+
+ "object": Instance MUST be an object.
+ "array": Instance MUST be an array.
+ "string": Instance MUST be a string.
+ "number": Instance MUST be a number, including floating point numbers.
+ "boolean": Instance MUST be the JSON literal "true" or "false".
+ "null": Instance MUST be the JSON literal "null". Note that without this type, null values are not allowed.
+ "any": Instance MAY be of any type, including null.
+
+
+
+
+ An array of one or more simple or schema types.
+ The instance value is valid if it is of the same type as one of the simple types, or valid by one of the schemas, in the array.
+
+
+
+ If this attribute is not specified, then all value types are accepted.
+
+
+
+ For example, a schema that defines if an instance can be a string or a number would be:
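+
+ A minimal sketch, using the array form of "type" described above:
+
+ { "type": ["string", "number"] }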
+
+
+
+
+
+
+
+ This attribute is an object with properties that specify the schemas for the properties of the instance object.
+ In this attribute's object, each property value MUST be a schema.
+ When the instance value is an object, the value of the instance's properties MUST be valid according to the schemas with the same property names specified in this attribute.
+ Objects are unordered, and therefore the order of the instance properties or attribute properties MUST NOT determine validation success.
+
+
+
+
+
+ This attribute is an object that defines the schema for a set of property names of an object instance.
+ The name of each property of this attribute's object is a regular expression pattern in the ECMA 262/Perl 5 format, while the value is a schema.
+ If the pattern matches the name of a property on the instance object, the value of the instance's property MUST be valid against the pattern name's schema value.
+
+
+
+
+ This attribute specifies how any instance property that is not explicitly defined by either the "properties" or "patternProperties" attributes (hereafter referred to as "additional properties") is handled. If specified, the value MUST be a schema or a boolean.
+ If a schema is provided, then all additional properties MUST be valid according to the schema.
+ If false is provided, then no additional properties are allowed.
+ The default value is an empty schema, which allows any value for additional properties.
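+
+ A sketch combining "properties", "patternProperties", and "additionalProperties" (the property names are invented for illustration):
+
+ var schema = {
+   "type": "object",
+   "properties": { "name": { "type": "string" } },
+   "patternProperties": { "^x-": { "type": "string" } },
+   "additionalProperties": false
+ };
+
+ // { "name": "a", "x-note": "b" } is valid, while { "name": "a", "other": 1 }
+ // is not: "other" matches neither "properties" nor "patternProperties",
+ // and additional properties are disallowed.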
+
+
+
+ This attribute provides the allowed items in an array instance. If specified, this attribute MUST be a schema or an array of schemas.
+ When this attribute value is a schema and the instance value is an array, then all the items in the array MUST be valid according to the schema.
+ When this attribute value is an array of schemas and the instance value is an array, each position in the instance array MUST be valid according to the schema in the corresponding position for this array. This is called tuple typing. When tuple typing is used, additional items are allowed, disallowed, or constrained by the "additionalItems" attribute in the same way that "additionalProperties" constrains additional properties for objects.
+
+
+
+ This attribute specifies how any item in the array instance that is not explicitly defined by "items" (hereafter referred to as "additional items") is handled. If specified, the value MUST be a schema or a boolean.
+ If a schema is provided:
+
+ If the "items" attribute is unspecified, then all items in the array instance must be valid against this schema.
+ If the "items" attribute is a schema, then this attribute is ignored.
+ If the "items" attribute is an array (during tuple typing), then any additional items MUST be valid against this schema.
+
+
+ If false is provided, then any additional items in the array are not allowed.
+ The default value is an empty schema, which allows any value for additional items.
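+
+ A sketch of tuple typing with "items" and "additionalItems" (the value shapes are invented for illustration):
+
+ var schema = {
+   "type": "array",
+   "items": [ { "type": "string" }, { "type": "number" } ],
+   "additionalItems": { "type": "boolean" }
+ };
+
+ // ["id", 3]               is valid (matches the two positional schemas)
+ // ["id", 3, true, false]  is valid (additional items must be booleans)
+ // ["id", 3, "x"]          is not valid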
+
+
+
+ This attribute is an array of strings that defines all the property names that must exist on the object instance.
+
+
+
+ This attribute is an object that specifies the requirements of a property on an object instance. If an object instance has a property with the same name as a property in this attribute's object, then the instance must be valid against the attribute's property value (hereafter referred to as the "dependency value").
+
+ The dependency value can take one of two forms: a simple dependency or a schema dependency.
+
+
+ Simple dependency: If the dependency value is a string, then the instance object MUST have a property with the same name as the dependency value. If the dependency value is an array of strings, then the instance object MUST have a property with the same name as each string in the dependency value's array.
+
+
+ Schema dependency: If the dependency value is a schema, then the instance object MUST be valid against the schema.
+
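+
+ A sketch of both dependency forms (the property names are invented for illustration):
+
+ var schema = {
+   "dependencies": {
+     // simple dependency: if "creditCard" is present, "billingAddress" must be too
+     "creditCard": "billingAddress",
+     // schema dependency: if "discount" is present, the instance must also be
+     // valid against this schema
+     "discount": { "required": ["coupon"] }
+   }
+ };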
+
+
+
+
+
+ This attribute defines the minimum value of the instance property when the type of the instance value is a number.
+
+
+
+ This attribute defines the maximum value of the instance property when the type of the instance value is a number.
+
+
+
+ This attribute indicates if the value of the instance (if the instance is a number) cannot equal the number defined by the "minimum" attribute. This is false by default, meaning the instance value can be greater than or equal to the minimum value.
+
+
+
+ This attribute indicates if the value of the instance (if the instance is a number) cannot equal the number defined by the "maximum" attribute. This is false by default, meaning the instance value can be less than or equal to the maximum value.
+
+
+
+ This attribute defines the minimum number of values in an array when the array is the instance value.
+
+
+
+ This attribute defines the maximum number of values in an array when the array is the instance value.
+
+
+
+ This attribute defines the minimum number of properties required on an object instance.
+
+
+
+ This attribute defines the maximum number of properties the object instance can have.
+
+
+
+ This attribute indicates that all items in an array instance MUST be unique (contain no two identical values).
+
+ Two instances are considered equal if they are both of the same type and:
+
+
+ are null; or
+ are booleans/numbers/strings and have the same value; or
+ are arrays, contain the same number of items, and each item in the array is equal to the item at the corresponding index in the other array; or
+ are objects, contain the same property names, and each property in the object is equal to the corresponding property in the other object.
+
+
+
+
+
+ When the instance value is a string, this provides a regular expression that a string instance MUST match in order to be valid. Regular expressions SHOULD follow the regular expression specification from ECMA 262/Perl 5.
+
+
+
+ When the instance value is a string, this defines the minimum length of the string.
+
+
+
+ When the instance value is a string, this defines the maximum length of the string.
+
+
+
+ This provides an enumeration of all possible values that are valid for the instance property. This MUST be an array, and each item in the array represents a possible value for the instance value. If this attribute is defined, the instance value MUST be one of the values in the array in order for the schema to be valid. Comparison of enum values uses the same algorithm as defined in "uniqueItems".
+
+
+
+ This attribute defines the default value of the instance when the instance is undefined.
+
+
+
+ This attribute is a string that provides a short description of the instance property.
+
+
+
+ This attribute is a string that provides a full description of the purpose of the instance property.
+
+
+
+ This attribute defines what value the number instance must be divisible by with no remainder (the result of the division must be an integer.) The value of this attribute SHOULD NOT be 0.
+
+
+
+ This attribute takes the same values as the "type" attribute; however, if the instance matches the type, or if this value is an array and the instance matches any type or schema in the array, then this instance is not valid.
+
+
+
+ The value of this property MUST be another schema which will provide a base schema which the current schema will inherit from. The inheritance rules are such that any instance that is valid according to the current schema MUST be valid according to the referenced schema. This MAY also be an array, in which case, the instance MUST be valid for all the schemas in the array. A schema that extends another schema MAY define additional attributes, constrain existing attributes, or add other constraints.
+
+ Conceptually, the behavior of extends can be seen as validating an
+ instance against all constraints in the extending schema as well as
+ the extended schema(s). More optimized implementations that merge
+ schemas are possible, but are not required. Some examples of using "extends":
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ This attribute defines the current URI of this schema (this attribute is
+ effectively a "self" link). This URI MAY be relative or absolute. If
+ the URI is relative it is resolved against the current URI of the parent
+ schema it is contained in. If this schema is not contained in any
+ parent schema, the current URI of the parent schema is held to be the
+ URI under which this schema was addressed. If id is missing, the current URI of a schema is
+ defined to be that of the parent schema. The current URI of the schema
+ is also used to construct relative references such as for $ref.
+
+
+
+
+
+ This attribute defines a URI of a schema that contains the full representation of this schema.
+ When a validator encounters this attribute, it SHOULD replace the current schema with the schema referenced by the value's URI (if known and available) and re-validate the instance.
+ This URI MAY be relative or absolute, and relative URIs SHOULD be resolved against the URI of the current schema.
+
+
+
+
+
+ This attribute defines a URI of a JSON Schema that is the schema of the current schema.
+ When this attribute is defined, a validator SHOULD use the schema referenced by the value's URI (if known and available) when resolving Hyper Schema links.
+
+
+
+ A validator MAY use this attribute's value to determine which version of JSON Schema the current schema is written in, and provide the appropriate validation features and behavior.
+ Therefore, it is RECOMMENDED that all schema authors include this attribute in their schemas to prevent conflicts with future JSON Schema specification changes.
+
+
+
+
+
+
+ The following attributes are specified in addition to those
+ attributes already provided by the core schema, with the specific
+ purpose of informing user agents of relations between resources based
+ on JSON data. Just as with JSON
+ schema attributes, all the attributes in hyper schemas are optional.
+ Therefore, an empty object is a valid (non-informative) schema, and
+ essentially describes plain JSON (no constraints on the structures).
+ Addition of attributes provides additive information for user agents.
+
+
+
+
+ The value of the links property MUST be an array, where each item
+ in the array is a link description object which describes the link
+ relations of the instances.
+
+
+
+
+
+
+ A link description object is used to describe link relations. In
+ the context of a schema, it defines the link relations of the
+ instances of the schema, and can be parameterized by the instance
+ values. The link description format can be used without JSON Schema,
+ and use of this format can
+ be declared by referencing the normative link description
+ schema as the schema for the data structure that uses the
+ links. The URI of the normative link description schema is:
+ http://json-schema.org/links (latest version) or
+ http://json-schema.org/draft-04/links (draft-04 version).
+
+
+
+
+ The value of the "href" link description property
+ indicates the target URI of the related resource. The value
+ of the instance property SHOULD be resolved as a URI-Reference per RFC 3986
+ and MAY be a relative URI. The base URI to be used for relative resolution
+ SHOULD be the URI used to retrieve the instance object (not the schema)
+ when used within a schema. Also, when links are used within a schema, the URI
+ SHOULD be parametrized by the property values of the instance
+ object, if property values exist for the corresponding variables
+ in the template (otherwise they MAY be provided from alternate sources, like user input).
+
+
+
+ Instance property values SHOULD be substituted into the URIs where
+ matching braces ('{', '}') are found surrounding zero or more characters,
+ creating an expanded URI. Instance property value substitutions are resolved
+ by using the text between the braces to denote the property name
+ from the instance to get the value to substitute.
+
+
+ For example, if an href value is defined:
+
+
+
+ Then it would be resolved by replacing the template variable with the value of the "id" property from the instance object.
+
+
+
+ If the value of the "id" property was "45", the expanded URI would be:
+
+
+
+
+
+ If matching braces are found with the string "@" (no quotes) between the braces, then the
+ actual instance value SHOULD be used to replace the braces, rather than a property value.
+ This should only be used in situations where the instance is a scalar (string,
+ boolean, or number), and not for objects or arrays.
+
+
+
+
+
+ The value of the "rel" property indicates the name of the
+ relation to the target resource. The relation to the target SHOULD be interpreted as specifically from the instance object that the schema (or sub-schema) applies to, not just the top level resource that contains the object within its hierarchy. If a resource JSON representation contains a sub object with a property interpreted as a link, that sub-object holds the relation with the target. A relation to target from the top level resource MUST be indicated with the schema describing the top level JSON representation.
+
+
+
+ Relationship definitions SHOULD NOT be media type dependent, and users are encouraged to utilize existing accepted relation definitions, including those in existing relation registries (see RFC 4287). However, we define these relations here for clarity of normative interpretation within the context of JSON hyper schema defined relations:
+
+
+
+ If the relation value is "self", when this property is encountered in
+ the instance object, the object represents a resource and the instance object is
+ treated as a full representation of the target resource identified by
+ the specified URI.
+
+
+
+ "full": This indicates that the target of the link is the full representation for the instance object. The object that contains this link may not itself be the full representation.
+
+
+
+ "describedby": This indicates the target of the link is the schema for the instance object. This MAY be used to specifically denote the schemas of objects within a JSON object hierarchy, facilitating polymorphic type data structures.
+
+
+
+ "root": This relation indicates that the target of the link
+ SHOULD be treated as the root or the body of the representation for the
+ purposes of user agent interaction or fragment resolution. All other
+ properties of the instance objects can be regarded as meta-data
+ descriptions for the data.
+
+
+
+
+
+ The following relations are applicable for schemas (the schema as the "from" resource in the relation):
+
+
+ "instances": This indicates the target resource that represents a collection of instances of a schema.
+ "create": This indicates a target to use for creating new instances of a schema. This link definition SHOULD be a submission link with a non-safe method (like POST).
+
+
+
+
+
+ For example, if a schema is defined:
+
+
+
+
+
+
+ And if the JSON representation of a collection of instance resources was retrieved:
+
+
+
+
+
+ This would indicate that for the first item in the collection, its own
+ (self) URI would resolve to "/Resource/thing" and the first item's "up"
+ relation SHOULD be resolved to the resource at "/Resource/parent".
+ The "children" collection would be located at "/Resource/?upId=thing".
+
+
+
+
+ This property value is a string that defines the templating language used in the "href" attribute. If no templating language is defined, then the default Link Description Object templating language is used.
+
+
+
+ This property value is a schema that defines the expected structure of the JSON representation of the target of the link.
+
+
+
+
+ The following properties also apply to link definition objects, and
+ provide functionality analogous to HTML forms, in providing a
+ means for submitting extra (often user supplied) information to send to a server.
+
+
+
+
+ This attribute defines which method can be used to access the target resource.
+ In an HTTP environment, this would be "GET" or "POST" (other HTTP methods
+ such as "PUT" and "DELETE" have semantics that are clearly implied by
+ accessed resources, and do not need to be defined here).
+ This defaults to "GET".
+
+
+
+
+
+ If present, this property indicates a query media type format that the server
+ supports for querying or posting to the collection of instances at the target
+ resource. The query can be
+ suffixed to the target URI to query the collection with
+ property-based constraints on the resources that SHOULD be returned from
+ the server or used to post data to the resource (depending on the method).
+
+
+ For example, with the following schema:
+
+
+
+ This indicates that the client can query the server for instances that have a specific name.
+
+
+
+ For example:
+
+
+
+
+
+ If no enctype or method is specified, only the single URI specified by
+ the href property is defined. If the method is POST, "application/json" is
+ the default media type.
+
+
+
+
+
+ This attribute contains a schema which defines the acceptable structure of the submitted
+ request (for a GET request, this schema would define the properties for the query string
+ and for a POST request, this would define the body).
+
+
+
+
+
+
+
+
+ This property indicates the fragment resolution protocol to use for
+ resolving fragment identifiers in URIs within the instance
+ representations. This applies to the instance object URIs and all
+ children of the instance object's URIs. The default fragment resolution
+ protocol is "json-pointer", which is defined below. Other fragment
+ resolution protocols MAY be used, but are not defined in this document.
+
+
+
+ The fragment identifier is based on RFC 3986, Sec 5, and defines the
+ mechanism for resolving references to entities within a document.
+
+
+
+ The "json-pointer" fragment resolution protocol uses a JSON Pointer to resolve fragment identifiers in URIs within instance representations.
+
+
+
+
+
+
+ This attribute indicates that the instance value SHOULD NOT be changed. Attempts by a user agent to modify the value of this property are expected to be rejected by a server.
+
+
+
+ If the instance property value is a string, this attribute defines that the string SHOULD be interpreted as binary data and decoded using the encoding named by this schema property. RFC 2045, Sec 6.1 lists the possible values for this property.
+
+
+
+
+ This attribute is a URI that defines what the instance's URI MUST start with in order to validate.
+ The value of the "pathStart" attribute MUST be resolved as per RFC 3986, Sec 5,
+ and is relative to the instance's URI.
+
+
+
+ When multiple schemas have been referenced for an instance, the user agent
+ can determine if this schema is applicable for a particular instance by
+ determining if the URI of the instance begins with the value of the "pathStart"
+ attribute. If the URI of the instance does not start with this URI,
+ or if another schema specifies a starting URI that is longer and also matches the
+ instance, this schema SHOULD NOT be applied to the instance. Any schema
+ that does not have a pathStart attribute SHOULD be considered applicable
+ to all the instances for which it is referenced.
+
+
+
+
+ This attribute defines the media type of the instance representations that this schema is defining.
+
+
+
+
+
+ This specification is a sub-type of the JSON format, and
+ consequently the security considerations are generally the same as RFC 4627.
+ However, an additional issue is that when link relation of "self"
+ is used to denote a full representation of an object, the user agent
+ SHOULD NOT consider the representation to be the authoritative representation
+ of the resource denoted by the target URI if the target URI is not
+ equivalent to or a sub-path of the URI used to request the resource
+ representation which contains the target URI with the "self" link.
+
+
+ For example, if a hyper schema was defined:
+
+
+
+
+
+
+ And a resource was requested from somesite.com:
+
+
+
+
+
+
+ With a response of:
+
+
+
+
+
+
+
+
+ The proposed MIME media type for JSON Schema is "application/schema+json".
+ Type name: application
+ Subtype name: schema+json
+ Required parameters: profile
+
+ The value of the profile parameter SHOULD be a URI (relative or absolute) that
+ refers to the schema used to define the structure of this structure (the
+ meta-schema). Normally the value would be http://json-schema.org/draft-04/hyper-schema,
+ but it is allowable to use other schemas that extend the hyper schema's meta-
+ schema.
+
+ Optional parameters: pretty
+ The value of the pretty parameter MAY be true or false to indicate if additional whitespace has been included to make the JSON representation easier to read.
+
+
+
+ This registry is maintained by IANA per RFC 4287 and this specification adds
+ four values: "full", "create", "instances", "root". New
+ assignments are subject to IESG Approval, as outlined in RFC 5226.
+ Requests should be made by email to IANA, which will then forward the
+ request to the IESG, requesting approval.
+
+
+
+
+
+
+
+
+ &rfc2045;
+ &rfc2119;
+ &rfc3339;
+ &rfc3986;
+ &rfc4287;
+
+
+ JSON Pointer
+
+ ForgeRock US, Inc.
+
+
+ SitePen (USA)
+
+
+
+
+
+
+ &rfc2616;
+ &rfc4627;
+ &rfc5226;
+ &iddiscovery;
+ &uritemplate;
+ &linkheader;
+ &html401;
+ &css21;
+
+
+
+
+
+
+
+ Changed "required" attribute to an array of strings.
+ Removed "format" attribute.
+ Added "minProperties" and "maxProperties" attributes.
+ Replaced "slash-delimited" fragment resolution with "json-pointer".
+ Added "template" LDO attribute.
+ Removed irrelevant "Open Issues" section.
+ Merged Conventions and Terminology sections.
+ Defined terms used in specification.
+ Removed "integer" type in favor of {"type":"number", "divisibleBy":1}.
+ Restricted "type" to only the core JSON types.
+ Improved wording of many sections.
+
+
+
+
+
+ Added example and verbiage to "extends" attribute.
+ Defined slash-delimited to use a leading slash.
+ Made "root" a relation instead of an attribute.
+ Removed address values, and MIME media type from format to reduce confusion (mediaType already exists, so it can be used for MIME types).
+ Added more explanation of nullability.
+ Removed "alternate" attribute.
+ Upper cased many normative usages of must, may, and should.
+ Replaced the link submission "properties" attribute with the "schema" attribute.
+ Replaced "optional" attribute with "required" attribute.
+ Replaced "maximumCanEqual" attribute with "exclusiveMaximum" attribute.
+ Replaced "minimumCanEqual" attribute with "exclusiveMinimum" attribute.
+ Replaced "requires" attribute with "dependencies" attribute.
+ Moved "contentEncoding" attribute to hyper schema.
+ Added "additionalItems" attribute.
+ Added "id" attribute.
+ Switched self-referencing variable substitution from "-this" to "@" to align with reserved characters in URI template.
+ Added "patternProperties" attribute.
+ Schema URIs are now namespace versioned.
+ Added "$ref" and "$schema" attributes.
+
+
+
+
+
+ Replaced "maxDecimal" attribute with "divisibleBy" attribute.
+ Added slash-delimited fragment resolution protocol and made it the default.
+ Added language about using links outside of schemas by referencing its normative URI.
+ Added "uniqueItems" attribute.
+ Added "targetSchema" attribute to link description object.
+
+
+
+
+
+ Fixed category and updates from template.
+
+
+
+
+
+ Initial draft.
+
+
+
+
+
+
+
diff --git a/familyark/app/node_modules/json-schema/lib/links.js b/familyark/app/node_modules/json-schema/lib/links.js
new file mode 100644
index 0000000..8a87f02
--- /dev/null
+++ b/familyark/app/node_modules/json-schema/lib/links.js
@@ -0,0 +1,66 @@
+/**
+ * JSON Schema link handler
+ * Copyright (c) 2007 Kris Zyp SitePen (www.sitepen.com)
+ * Licensed under the MIT (MIT-LICENSE.txt) license.
+ */
+(function (root, factory) {
+ if (typeof define === 'function' && define.amd) {
+ // AMD. Register as an anonymous module.
+ define([], function () {
+ return factory();
+ });
+ } else if (typeof module === 'object' && module.exports) {
+ // Node. Does not work with strict CommonJS, but
+ // only CommonJS-like environments that support module.exports,
+ // like Node.
+ module.exports = factory();
+ } else {
+ // Browser globals
+ root.jsonSchemaLinks = factory();
+ }
+}(this, function () {// setup primitive classes to be JSON Schema types
+var exports = {};
+exports.cacheLinks = true;
+exports.getLink = function(relation, instance, schema){
+ // gets the URI of the link for the given relation based on the instance and schema
+ // for example:
+ // getLink(
+ // "brother",
+ // {"brother_id":33},
+ // {links:[{rel:"brother", href:"Brother/{brother_id}"}]}) ->
+ // "Brother/33"
+ var links = schema.__linkTemplates;
+ if(!links){
+ links = {};
+ var schemaLinks = schema.links;
+ if(schemaLinks && schemaLinks instanceof Array){
+ schemaLinks.forEach(function(link){
+ /* // TODO: allow for multiple same-name relations
+ if(links[link.rel]){
+ if(!(links[link.rel] instanceof Array)){
+ links[link.rel] = [links[link.rel]];
+ }
+ }*/
+ links[link.rel] = link.href;
+ });
+ }
+ if(exports.cacheLinks){
+ schema.__linkTemplates = links;
+ }
+ }
+ var linkTemplate = links[relation];
+ return linkTemplate && exports.substitute(linkTemplate, instance);
+};
+
+exports.substitute = function(linkTemplate, instance){
+ return linkTemplate.replace(/\{([^\}]*)\}/g, function(t, property){
+ var value = instance[decodeURIComponent(property)];
+ if(value instanceof Array){
+ // the value is an array, it should produce a URI like /Table/(4,5,8) and store.get() should handle that as an array of values
+ return '(' + value.join(',') + ')';
+ }
+ return value;
+ });
+};
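+
+// Example usage (a sketch; the return values follow from the code above):
+//   exports.substitute("/Resource/{id}", { id: "thing" })        -> "/Resource/thing"
+//   exports.getLink("self", { id: "thing" },
+//                   { links: [ { rel: "self", href: "{id}" } ] }) -> "thing"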
+return exports;
+}));
\ No newline at end of file
diff --git a/familyark/app/node_modules/json-schema/lib/validate.js b/familyark/app/node_modules/json-schema/lib/validate.js
new file mode 100644
index 0000000..e4dc151
--- /dev/null
+++ b/familyark/app/node_modules/json-schema/lib/validate.js
@@ -0,0 +1,273 @@
+/**
+ * JSONSchema Validator - Validates JavaScript objects using JSON Schemas
+ * (http://www.json.com/json-schema-proposal/)
+ *
+ * Copyright (c) 2007 Kris Zyp SitePen (www.sitepen.com)
+ * Licensed under the MIT (MIT-LICENSE.txt) license.
+To use the validator call the validate function with an instance object and an optional schema object.
+If a schema is provided, it will be used to validate. If the instance object refers to a schema (self-validating),
+that schema will be used to validate and the schema parameter is not necessary (if both exist,
+both validations will occur).
+The validate method will return an array of validation errors. If there are no errors, then an
+empty list will be returned. A validation error will have two properties:
+"property" which indicates which property had the error
+"message" which indicates what the error was
+ */
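+
+// A brief usage sketch (a sketch only: the returned object's "valid" and
+// "errors" properties follow the summary inside the validate function below,
+// and the require path assumes this file's location within the package):
+//
+//   var validate = require('json-schema/lib/validate');
+//   var result = validate({ name: "test" },
+//     { type: "object", properties: { name: { type: "string", required: true } } });
+//   // result.valid === true, result.errors is an empty array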
+(function (root, factory) {
+ if (typeof define === 'function' && define.amd) {
+ // AMD. Register as an anonymous module.
+ define([], function () {
+ return factory();
+ });
+ } else if (typeof module === 'object' && module.exports) {
+ // Node. Does not work with strict CommonJS, but
+ // only CommonJS-like environments that support module.exports,
+ // like Node.
+ module.exports = factory();
+ } else {
+ // Browser globals
+ root.jsonSchema = factory();
+ }
+}(this, function () {// setup primitive classes to be JSON Schema types
+var exports = validate
+exports.Integer = {type:"integer"};
+var primitiveConstructors = {
+ String: String,
+ Boolean: Boolean,
+ Number: Number,
+ Object: Object,
+ Array: Array,
+ Date: Date
+}
+exports.validate = validate;
+function validate(/*Any*/instance,/*Object*/schema) {
+ // Summary:
+ // To use the validator call JSONSchema.validate with an instance object and an optional schema object.
+ // If a schema is provided, it will be used to validate. If the instance object refers to a schema (self-validating),
+ // that schema will be used to validate and the schema parameter is not necessary (if both exist,
+ // both validations will occur).
+ // The validate method will return an object with two properties:
+ // valid: A boolean indicating if the instance is valid by the schema
+ // errors: An array of validation errors. If there are no errors, then an
+ // empty list will be returned. A validation error will have two properties:
+ // property: which indicates which property had the error
+ // message: which indicates what the error was
+ //
+ return validate(instance, schema, {changing: false});//, coerce: false, existingOnly: false});
+ };
+exports.checkPropertyChange = function(/*Any*/value,/*Object*/schema, /*String*/property) {
+ // Summary:
+ // The checkPropertyChange method will check to see if an value can legally be in property with the given schema
+ // This is slightly different than the validate method in that it will fail if the schema is readonly and it will
+ // not check for self-validation, it is assumed that the passed in value is already internally valid.
+ // The checkPropertyChange method will return the same object type as validate, see JSONSchema.validate for
+ // information.
+ //
+ return validate(value, schema, {changing: property || "property"});
+ };
+var validate = exports._validate = function(/*Any*/instance,/*Object*/schema,/*Object*/options) {
+
+ if (!options) options = {};
+ var _changing = options.changing;
+
+ function getType(schema){
+ return schema.type || (primitiveConstructors[schema.name] == schema && schema.name.toLowerCase());
+ }
+ var errors = [];
+ // validate a value against a property definition
+ function checkProp(value, schema, path,i){
+
+ var l;
+ path += path ? typeof i == 'number' ? '[' + i + ']' : typeof i == 'undefined' ? '' : '.' + i : i;
+ function addError(message){
+ errors.push({property:path,message:message});
+ }
+
+ if((typeof schema != 'object' || schema instanceof Array) && (path || typeof schema != 'function') && !(schema && getType(schema))){
+ if(typeof schema == 'function'){
+ if(!(value instanceof schema)){
+ addError("is not an instance of the class/constructor " + schema.name);
+ }
+ }else if(schema){
+ addError("Invalid schema/property definition " + schema);
+ }
+ return null;
+ }
+ if(_changing && schema.readonly){
+ addError("is a readonly field, it can not be changed");
+ }
+ if(schema['extends']){ // if it extends another schema, it must pass that schema as well
+ checkProp(value,schema['extends'],path,i);
+ }
+ // validate a value against a type definition
+ function checkType(type,value){
+ if(type){
+ if(typeof type == 'string' && type != 'any' &&
+ (type == 'null' ? value !== null : typeof value != type) &&
+ !(value instanceof Array && type == 'array') &&
+ !(value instanceof Date && type == 'date') &&
+ !(type == 'integer' && value%1===0)){
+ return [{property:path,message:(typeof value) + " value found, but a " + type + " is required"}];
+ }
+ if(type instanceof Array){
+ var unionErrors=[];
+ for(var j = 0; j < type.length; j++){ // a union type
+ if(!(unionErrors=checkType(type[j],value)).length){
+ break;
+ }
+ }
+ if(unionErrors.length){
+ return unionErrors;
+ }
+ }else if(typeof type == 'object'){
+ var priorErrors = errors;
+ errors = [];
+ checkProp(value,type,path);
+ var theseErrors = errors;
+ errors = priorErrors;
+ return theseErrors;
+ }
+ }
+ return [];
+ }
+ if(value === undefined){
+ if(schema.required){
+ addError("is missing and it is required");
+ }
+ }else{
+ errors = errors.concat(checkType(getType(schema),value));
+ if(schema.disallow && !checkType(schema.disallow,value).length){
+ addError(" disallowed value was matched");
+ }
+ if(value !== null){
+ if(value instanceof Array){
+ if(schema.items){
+ var itemsIsArray = schema.items instanceof Array;
+ var propDef = schema.items;
+ for (i = 0, l = value.length; i < l; i += 1) {
+ if (itemsIsArray)
+ propDef = schema.items[i];
+ if (options.coerce)
+ value[i] = options.coerce(value[i], propDef);
+ errors.concat(checkProp(value[i],propDef,path,i));
+ }
+ }
+ if(schema.minItems && value.length < schema.minItems){
+ addError("There must be a minimum of " + schema.minItems + " in the array");
+ }
+ if(schema.maxItems && value.length > schema.maxItems){
+ addError("There must be a maximum of " + schema.maxItems + " in the array");
+ }
+ }else if(schema.properties || schema.additionalProperties){
+ errors.concat(checkObj(value, schema.properties, path, schema.additionalProperties));
+ }
+ if(schema.pattern && typeof value == 'string' && !value.match(schema.pattern)){
+ addError("does not match the regex pattern " + schema.pattern);
+ }
+ if(schema.maxLength && typeof value == 'string' && value.length > schema.maxLength){
+ addError("may only be " + schema.maxLength + " characters long");
+ }
+ if(schema.minLength && typeof value == 'string' && value.length < schema.minLength){
+ addError("must be at least " + schema.minLength + " characters long");
+ }
+ if(typeof schema.minimum !== undefined && typeof value == typeof schema.minimum &&
+ schema.minimum > value){
+ addError("must have a minimum value of " + schema.minimum);
+ }
+ if(typeof schema.maximum !== undefined && typeof value == typeof schema.maximum &&
+ schema.maximum < value){
+ addError("must have a maximum value of " + schema.maximum);
+ }
+ if(schema['enum']){
+ var enumer = schema['enum'];
+ l = enumer.length;
+ var found;
+ for(var j = 0; j < l; j++){
+ if(enumer[j]===value){
+ found=1;
+ break;
+ }
+ }
+ if(!found){
+ addError("does not have a value in the enumeration " + enumer.join(", "));
+ }
+ }
+ if(typeof schema.maxDecimal == 'number' &&
+ (value.toString().match(new RegExp("\\.[0-9]{" + (schema.maxDecimal + 1) + ",}")))){
+ addError("may only have " + schema.maxDecimal + " digits of decimal places");
+ }
+ }
+ }
+ return null;
+ }
+ // validate an object against a schema
+ function checkObj(instance,objTypeDef,path,additionalProp){
+
+ if(typeof objTypeDef =='object'){
+ if(typeof instance != 'object' || instance instanceof Array){
+ errors.push({property:path,message:"an object is required"});
+ }
+
+ for(var i in objTypeDef){
+ if(objTypeDef.hasOwnProperty(i)){
+ var value = instance[i];
+ // skip _not_ specified properties
+ if (value === undefined && options.existingOnly) continue;
+ var propDef = objTypeDef[i];
+ // set default
+ if(value === undefined && propDef["default"]){
+ value = instance[i] = propDef["default"];
+ }
+ if(options.coerce && i in instance){
+ value = instance[i] = options.coerce(value, propDef);
+ }
+ checkProp(value,propDef,path,i);
+ }
+ }
+ }
+ for(i in instance){
+ if(instance.hasOwnProperty(i) && !(i.charAt(0) == '_' && i.charAt(1) == '_') && objTypeDef && !objTypeDef[i] && additionalProp===false){
+ if (options.filter) {
+ delete instance[i];
+ continue;
+ } else {
+ errors.push({property:path,message:(typeof value) + "The property " + i +
+ " is not defined in the schema and the schema does not allow additional properties"});
+ }
+ }
+ var requires = objTypeDef && objTypeDef[i] && objTypeDef[i].requires;
+ if(requires && !(requires in instance)){
+ errors.push({property:path,message:"the presence of the property " + i + " requires that " + requires + " also be present"});
+ }
+ value = instance[i];
+ if(additionalProp && (!(objTypeDef && typeof objTypeDef == 'object') || !(i in objTypeDef))){
+ if(options.coerce){
+ value = instance[i] = options.coerce(value, additionalProp);
+ }
+ checkProp(value,additionalProp,path,i);
+ }
+ if(!_changing && value && value.$schema){
+ errors = errors.concat(checkProp(value,value.$schema,path,i));
+ }
+ }
+ return errors;
+ }
+ if(schema){
+ checkProp(instance,schema,'',_changing || '');
+ }
+ if(!_changing && instance && instance.$schema){
+ checkProp(instance,instance.$schema,'','');
+ }
+ return {valid:!errors.length,errors:errors};
+};
+exports.mustBeValid = function(result){
+ // summary:
+ // This checks to ensure that the result is valid and will throw an error with an appropriate message if it is not
+ // result: the result returned from checkPropertyChange or validate
+ if(!result.valid){
+ throw new TypeError(result.errors.map(function(error){return "for property " + error.property + ': ' + error.message;}).join(", \n"));
+ }
+}
+
+return exports;
+}));
diff --git a/familyark/app/node_modules/json-schema/package.json b/familyark/app/node_modules/json-schema/package.json
new file mode 100644
index 0000000..db3127a
--- /dev/null
+++ b/familyark/app/node_modules/json-schema/package.json
@@ -0,0 +1,71 @@
+{
+ "_from": "json-schema@0.2.3",
+ "_id": "json-schema@0.2.3",
+ "_inBundle": false,
+ "_integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=",
+ "_location": "/json-schema",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "version",
+ "registry": true,
+ "raw": "json-schema@0.2.3",
+ "name": "json-schema",
+ "escapedName": "json-schema",
+ "rawSpec": "0.2.3",
+ "saveSpec": null,
+ "fetchSpec": "0.2.3"
+ },
+ "_requiredBy": [
+ "/jsprim"
+ ],
+ "_resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz",
+ "_shasum": "b480c892e59a2f05954ce727bd3f2a4e882f9e13",
+ "_spec": "json-schema@0.2.3",
+ "_where": "/home/g/Workspace/hatthieves/familyARK/familyark/app/node_modules/jsprim",
+ "author": {
+ "name": "Kris Zyp"
+ },
+ "bugs": {
+ "url": "https://github.com/kriszyp/json-schema/issues"
+ },
+ "bundleDependencies": false,
+ "deprecated": false,
+ "description": "JSON Schema validation and specifications",
+ "devDependencies": {
+ "vows": "*"
+ },
+ "directories": {
+ "lib": "./lib"
+ },
+ "homepage": "https://github.com/kriszyp/json-schema#readme",
+ "keywords": [
+ "json",
+ "schema"
+ ],
+ "licenses": [
+ {
+ "type": "AFLv2.1",
+ "url": "http://trac.dojotoolkit.org/browser/dojo/trunk/LICENSE#L43"
+ },
+ {
+ "type": "BSD",
+ "url": "http://trac.dojotoolkit.org/browser/dojo/trunk/LICENSE#L13"
+ }
+ ],
+ "main": "./lib/validate.js",
+ "maintainers": [
+ {
+ "name": "Kris Zyp",
+ "email": "kriszyp@gmail.com"
+ }
+ ],
+ "name": "json-schema",
+ "repository": {
+ "type": "git",
+ "url": "git+ssh://git@github.com/kriszyp/json-schema.git"
+ },
+ "scripts": {
+ "test": "echo TESTS DISABLED vows --spec test/*.js"
+ },
+ "version": "0.2.3"
+}
diff --git a/familyark/app/node_modules/json-schema/test/tests.js b/familyark/app/node_modules/json-schema/test/tests.js
new file mode 100644
index 0000000..2938aea
--- /dev/null
+++ b/familyark/app/node_modules/json-schema/test/tests.js
@@ -0,0 +1,95 @@
+var assert = require('assert');
+var vows = require('vows');
+var path = require('path');
+var fs = require('fs');
+
+var validate = require('../lib/validate').validate;
+
+
+var revision = 'draft-03';
+var schemaRoot = path.join(__dirname, '..', revision);
+var schemaNames = ['schema', 'hyper-schema', 'links', 'json-ref' ];
+var schemas = {};
+
+schemaNames.forEach(function(name) {
+ var file = path.join(schemaRoot, name);
+ schemas[name] = loadSchema(file);
+});
+
+schemaNames.forEach(function(name) {
+ var s, n = name+'-nsd', f = path.join(schemaRoot, name);
+ schemas[n] = loadSchema(f);
+ s = schemas[n];
+ delete s['$schema'];
+});
+
+function loadSchema(path) {
+ var data = fs.readFileSync(path, 'utf-8');
+ var schema = JSON.parse(data);
+ return schema;
+}
+
+function resultIsValid() {
+ return function(result) {
+ assert.isObject(result);
+ //assert.isBoolean(result.valid);
+ assert.equal(typeof(result.valid), 'boolean');
+ assert.isArray(result.errors);
+ for (var i = 0; i < result.errors.length; i++) {
+ assert.notEqual(result.errors[i], null, 'errors['+i+'] is null');
+ }
+ }
+}
+
+function assertValidates(doc, schema) {
+ var context = {};
+
+ context[': validate('+doc+', '+schema+')'] = {
+ topic: validate(schemas[doc], schemas[schema]),
+ 'returns valid result': resultIsValid(),
+ 'with valid=true': function(result) { assert.equal(result.valid, true); },
+ 'and no errors': function(result) {
+ // XXX work-around for bug in vows: [null] chokes it
+ if (result.errors[0] == null) assert.fail('(errors contains null)');
+ assert.length(result.errors, 0);
+ }
+ };
+
+ return context;
+}
+
+function assertSelfValidates(doc) {
+ var context = {};
+
+ context[': validate('+doc+')'] = {
+ topic: validate(schemas[doc]),
+ 'returns valid result': resultIsValid(),
+ 'with valid=true': function(result) { assert.equal(result.valid, true); },
+ 'and no errors': function(result) { assert.length(result.errors, 0); }
+ };
+
+ return context;
+}
+
+var suite = vows.describe('JSON Schema').addBatch({
+ 'Core-NSD self-validates': assertSelfValidates('schema-nsd'),
+ 'Core-NSD/Core-NSD': assertValidates('schema-nsd', 'schema-nsd'),
+ 'Core-NSD/Core': assertValidates('schema-nsd', 'schema'),
+
+ 'Core self-validates': assertSelfValidates('schema'),
+ 'Core/Core': assertValidates('schema', 'schema'),
+
+ 'Hyper-NSD self-validates': assertSelfValidates('hyper-schema-nsd'),
+ 'Hyper self-validates': assertSelfValidates('hyper-schema'),
+ 'Hyper/Hyper': assertValidates('hyper-schema', 'hyper-schema'),
+ 'Hyper/Core': assertValidates('hyper-schema', 'schema'),
+
+ 'Links-NSD self-validates': assertSelfValidates('links-nsd'),
+ 'Links self-validates': assertSelfValidates('links'),
+ 'Links/Hyper': assertValidates('links', 'hyper-schema'),
+ 'Links/Core': assertValidates('links', 'schema'),
+
+ 'Json-Ref self-validates': assertSelfValidates('json-ref'),
+ 'Json-Ref/Hyper': assertValidates('json-ref', 'hyper-schema'),
+ 'Json-Ref/Core': assertValidates('json-ref', 'schema')
+}).export(module);
diff --git a/familyark/app/node_modules/json-stringify-safe/.npmignore b/familyark/app/node_modules/json-stringify-safe/.npmignore
new file mode 100644
index 0000000..17d6b36
--- /dev/null
+++ b/familyark/app/node_modules/json-stringify-safe/.npmignore
@@ -0,0 +1 @@
+/*.tgz
diff --git a/familyark/app/node_modules/json-stringify-safe/CHANGELOG.md b/familyark/app/node_modules/json-stringify-safe/CHANGELOG.md
new file mode 100644
index 0000000..42bcb60
--- /dev/null
+++ b/familyark/app/node_modules/json-stringify-safe/CHANGELOG.md
@@ -0,0 +1,14 @@
+## Unreleased
+- Fixes stringify to only take ancestors into account when checking
+ circularity.
+ It previously assumed every visited object was circular which led to [false
+ positives][issue9].
+ Uses the tiny serializer I wrote for [Must.js][must] a year and a half ago.
+- Fixes calling the `replacer` function in the proper context (`thisArg`).
+- Fixes calling the `cycleReplacer` function in the proper context (`thisArg`).
+- Speeds serializing by a factor of
+ Big-O(h-my-god-it-linearly-searched-every-object) it had ever seen. Searching
+ only the ancestors for circular references speeds things up considerably.
+
+[must]: https://github.com/moll/js-must
+[issue9]: https://github.com/isaacs/json-stringify-safe/issues/9
diff --git a/familyark/app/node_modules/json-stringify-safe/LICENSE b/familyark/app/node_modules/json-stringify-safe/LICENSE
new file mode 100644
index 0000000..19129e3
--- /dev/null
+++ b/familyark/app/node_modules/json-stringify-safe/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/familyark/app/node_modules/json-stringify-safe/Makefile b/familyark/app/node_modules/json-stringify-safe/Makefile
new file mode 100644
index 0000000..36088c7
--- /dev/null
+++ b/familyark/app/node_modules/json-stringify-safe/Makefile
@@ -0,0 +1,35 @@
+NODE_OPTS =
+TEST_OPTS =
+
+love:
+ @echo "Feel like makin' love."
+
+test:
+ @node $(NODE_OPTS) ./node_modules/.bin/_mocha -R dot $(TEST_OPTS)
+
+spec:
+ @node $(NODE_OPTS) ./node_modules/.bin/_mocha -R spec $(TEST_OPTS)
+
+autotest:
+ @node $(NODE_OPTS) ./node_modules/.bin/_mocha -R dot --watch $(TEST_OPTS)
+
+autospec:
+ @node $(NODE_OPTS) ./node_modules/.bin/_mocha -R spec --watch $(TEST_OPTS)
+
+pack:
+ @file=$$(npm pack); echo "$$file"; tar tf "$$file"
+
+publish:
+ npm publish
+
+tag:
+ git tag "v$$(node -e 'console.log(require("./package").version)')"
+
+clean:
+ rm -f *.tgz
+ npm prune --production
+
+.PHONY: love
+.PHONY: test spec autotest autospec
+.PHONY: pack publish tag
+.PHONY: clean
diff --git a/familyark/app/node_modules/json-stringify-safe/README.md b/familyark/app/node_modules/json-stringify-safe/README.md
new file mode 100644
index 0000000..a11f302
--- /dev/null
+++ b/familyark/app/node_modules/json-stringify-safe/README.md
@@ -0,0 +1,52 @@
+# json-stringify-safe
+
+Like JSON.stringify, but doesn't throw on circular references.
+
+## Usage
+
+Takes the same arguments as `JSON.stringify`.
+
+```javascript
+var stringify = require('json-stringify-safe');
+var circularObj = {};
+circularObj.circularRef = circularObj;
+circularObj.list = [ circularObj, circularObj ];
+console.log(stringify(circularObj, null, 2));
+```
+
+Output:
+
+```json
+{
+ "circularRef": "[Circular]",
+ "list": [
+ "[Circular]",
+ "[Circular]"
+ ]
+}
+```
+
+## Details
+
+```
+stringify(obj, serializer, indent, decycler)
+```
+
+The first three arguments are the same as to JSON.stringify. The last
+is an argument that's only used when the object has been seen already.
+
+The default `decycler` function returns the string `'[Circular]'`.
+If, for example, you pass in `function(k,v){}` (return nothing) then it
+will prune cycles. If you pass in `function(k,v){ return {foo: 'bar'}}`,
+then cyclical objects will always be represented as `{"foo":"bar"}` in
+the result.
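+
+A brief sketch of that hook (illustrative, not from the original docs):
+
+```javascript
+var stringify = require('json-stringify-safe');
+var obj = {name: "Alice"};
+obj.self = obj;
+
+// Prune cycles entirely by returning nothing from the decycler.
+stringify(obj, null, 2, function(k, v) {});
+
+// Replace cycles with a fixed placeholder object instead.
+stringify(obj, null, 2, function(k, v) { return {foo: 'bar'}; });
+```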
+
+```
+stringify.getSerialize(serializer, decycler)
+```
+
+Returns a serializer that can be used elsewhere. This is the actual
+function that's passed to JSON.stringify.
+
+**Note** that the function returned from `getSerialize` is stateful for now, so
+do **not** use it more than once.
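+
+For example (a minimal sketch; create a fresh serializer for each call, per the
+note above):
+
+```javascript
+var json = JSON.stringify(obj, stringify.getSerialize(), 2);
+```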
diff --git a/familyark/app/node_modules/json-stringify-safe/package.json b/familyark/app/node_modules/json-stringify-safe/package.json
new file mode 100644
index 0000000..38d5457
--- /dev/null
+++ b/familyark/app/node_modules/json-stringify-safe/package.json
@@ -0,0 +1,66 @@
+{
+ "_from": "json-stringify-safe@~5.0.1",
+ "_id": "json-stringify-safe@5.0.1",
+ "_inBundle": false,
+ "_integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=",
+ "_location": "/json-stringify-safe",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "json-stringify-safe@~5.0.1",
+ "name": "json-stringify-safe",
+ "escapedName": "json-stringify-safe",
+ "rawSpec": "~5.0.1",
+ "saveSpec": null,
+ "fetchSpec": "~5.0.1"
+ },
+ "_requiredBy": [
+ "/request"
+ ],
+ "_resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz",
+ "_shasum": "1296a2d58fd45f19a0f6ce01d65701e2c735b6eb",
+ "_spec": "json-stringify-safe@~5.0.1",
+ "_where": "/home/g/Workspace/hatthieves/familyARK/familyark/app/node_modules/request",
+ "author": {
+ "name": "Isaac Z. Schlueter",
+ "email": "i@izs.me",
+ "url": "http://blog.izs.me"
+ },
+ "bugs": {
+ "url": "https://github.com/isaacs/json-stringify-safe/issues"
+ },
+ "bundleDependencies": false,
+ "contributors": [
+ {
+ "name": "Andri Möll",
+ "email": "andri@dot.ee",
+ "url": "http://themoll.com"
+ }
+ ],
+ "deprecated": false,
+ "description": "Like JSON.stringify, but doesn't blow up on circular refs.",
+ "devDependencies": {
+ "mocha": ">= 2.1.0 < 3",
+ "must": ">= 0.12 < 0.13",
+ "sinon": ">= 1.12.2 < 2"
+ },
+ "homepage": "https://github.com/isaacs/json-stringify-safe",
+ "keywords": [
+ "json",
+ "stringify",
+ "circular",
+ "safe"
+ ],
+ "license": "ISC",
+ "main": "stringify.js",
+ "name": "json-stringify-safe",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/isaacs/json-stringify-safe.git"
+ },
+ "scripts": {
+ "test": "node test.js"
+ },
+ "version": "5.0.1"
+}
diff --git a/familyark/app/node_modules/json-stringify-safe/stringify.js b/familyark/app/node_modules/json-stringify-safe/stringify.js
new file mode 100644
index 0000000..124a452
--- /dev/null
+++ b/familyark/app/node_modules/json-stringify-safe/stringify.js
@@ -0,0 +1,27 @@
+exports = module.exports = stringify
+exports.getSerialize = serializer
+
+function stringify(obj, replacer, spaces, cycleReplacer) {
+ return JSON.stringify(obj, serializer(replacer, cycleReplacer), spaces)
+}
+
+function serializer(replacer, cycleReplacer) {
+ var stack = [], keys = []
+
+ if (cycleReplacer == null) cycleReplacer = function(key, value) {
+ if (stack[0] === value) return "[Circular ~]"
+ return "[Circular ~." + keys.slice(0, stack.indexOf(value)).join(".") + "]"
+ }
+
+ return function(key, value) {
+ if (stack.length > 0) {
+ var thisPos = stack.indexOf(this)
+ ~thisPos ? stack.splice(thisPos + 1) : stack.push(this)
+ ~thisPos ? keys.splice(thisPos, Infinity, key) : keys.push(key)
+ if (~stack.indexOf(value)) value = cycleReplacer.call(this, key, value)
+ }
+ else stack.push(value)
+
+ return replacer == null ? value : replacer.call(this, key, value)
+ }
+}
diff --git a/familyark/app/node_modules/json-stringify-safe/test/mocha.opts b/familyark/app/node_modules/json-stringify-safe/test/mocha.opts
new file mode 100644
index 0000000..2544e58
--- /dev/null
+++ b/familyark/app/node_modules/json-stringify-safe/test/mocha.opts
@@ -0,0 +1,2 @@
+--recursive
+--require must
diff --git a/familyark/app/node_modules/json-stringify-safe/test/stringify_test.js b/familyark/app/node_modules/json-stringify-safe/test/stringify_test.js
new file mode 100644
index 0000000..5b32583
--- /dev/null
+++ b/familyark/app/node_modules/json-stringify-safe/test/stringify_test.js
@@ -0,0 +1,246 @@
+var Sinon = require("sinon")
+var stringify = require("..")
+function jsonify(obj) { return JSON.stringify(obj, null, 2) }
+
+describe("Stringify", function() {
+ it("must stringify circular objects", function() {
+ var obj = {name: "Alice"}
+ obj.self = obj
+ var json = stringify(obj, null, 2)
+ json.must.eql(jsonify({name: "Alice", self: "[Circular ~]"}))
+ })
+
+ it("must stringify circular objects with intermediaries", function() {
+ var obj = {name: "Alice"}
+ obj.identity = {self: obj}
+ var json = stringify(obj, null, 2)
+ json.must.eql(jsonify({name: "Alice", identity: {self: "[Circular ~]"}}))
+ })
+
+ it("must stringify circular objects deeper", function() {
+ var obj = {name: "Alice", child: {name: "Bob"}}
+ obj.child.self = obj.child
+
+ stringify(obj, null, 2).must.eql(jsonify({
+ name: "Alice",
+ child: {name: "Bob", self: "[Circular ~.child]"}
+ }))
+ })
+
+ it("must stringify circular objects deeper with intermediaries", function() {
+ var obj = {name: "Alice", child: {name: "Bob"}}
+ obj.child.identity = {self: obj.child}
+
+ stringify(obj, null, 2).must.eql(jsonify({
+ name: "Alice",
+ child: {name: "Bob", identity: {self: "[Circular ~.child]"}}
+ }))
+ })
+
+ it("must stringify circular objects in an array", function() {
+ var obj = {name: "Alice"}
+ obj.self = [obj, obj]
+
+ stringify(obj, null, 2).must.eql(jsonify({
+ name: "Alice", self: ["[Circular ~]", "[Circular ~]"]
+ }))
+ })
+
+ it("must stringify circular objects deeper in an array", function() {
+ var obj = {name: "Alice", children: [{name: "Bob"}, {name: "Eve"}]}
+ obj.children[0].self = obj.children[0]
+ obj.children[1].self = obj.children[1]
+
+ stringify(obj, null, 2).must.eql(jsonify({
+ name: "Alice",
+ children: [
+ {name: "Bob", self: "[Circular ~.children.0]"},
+ {name: "Eve", self: "[Circular ~.children.1]"}
+ ]
+ }))
+ })
+
+ it("must stringify circular arrays", function() {
+ var obj = []
+ obj.push(obj)
+ obj.push(obj)
+ var json = stringify(obj, null, 2)
+ json.must.eql(jsonify(["[Circular ~]", "[Circular ~]"]))
+ })
+
+ it("must stringify circular arrays with intermediaries", function() {
+ var obj = []
+ obj.push({name: "Alice", self: obj})
+ obj.push({name: "Bob", self: obj})
+
+ stringify(obj, null, 2).must.eql(jsonify([
+ {name: "Alice", self: "[Circular ~]"},
+ {name: "Bob", self: "[Circular ~]"}
+ ]))
+ })
+
+ it("must stringify repeated objects in objects", function() {
+ var obj = {}
+ var alice = {name: "Alice"}
+ obj.alice1 = alice
+ obj.alice2 = alice
+
+ stringify(obj, null, 2).must.eql(jsonify({
+ alice1: {name: "Alice"},
+ alice2: {name: "Alice"}
+ }))
+ })
+
+ it("must stringify repeated objects in arrays", function() {
+ var alice = {name: "Alice"}
+ var obj = [alice, alice]
+ var json = stringify(obj, null, 2)
+ json.must.eql(jsonify([{name: "Alice"}, {name: "Alice"}]))
+ })
+
+ it("must call given decycler and use its output", function() {
+ var obj = {}
+ obj.a = obj
+ obj.b = obj
+
+ var decycle = Sinon.spy(function() { return decycle.callCount })
+ var json = stringify(obj, null, 2, decycle)
+ json.must.eql(jsonify({a: 1, b: 2}, null, 2))
+
+ decycle.callCount.must.equal(2)
+ decycle.thisValues[0].must.equal(obj)
+ decycle.args[0][0].must.equal("a")
+ decycle.args[0][1].must.equal(obj)
+ decycle.thisValues[1].must.equal(obj)
+ decycle.args[1][0].must.equal("b")
+ decycle.args[1][1].must.equal(obj)
+ })
+
+ it("must call replacer and use its output", function() {
+ var obj = {name: "Alice", child: {name: "Bob"}}
+
+ var replacer = Sinon.spy(bangString)
+ var json = stringify(obj, replacer, 2)
+ json.must.eql(jsonify({name: "Alice!", child: {name: "Bob!"}}))
+
+ replacer.callCount.must.equal(4)
+ replacer.args[0][0].must.equal("")
+ replacer.args[0][1].must.equal(obj)
+ replacer.thisValues[1].must.equal(obj)
+ replacer.args[1][0].must.equal("name")
+ replacer.args[1][1].must.equal("Alice")
+ replacer.thisValues[2].must.equal(obj)
+ replacer.args[2][0].must.equal("child")
+ replacer.args[2][1].must.equal(obj.child)
+ replacer.thisValues[3].must.equal(obj.child)
+ replacer.args[3][0].must.equal("name")
+ replacer.args[3][1].must.equal("Bob")
+ })
+
+ it("must call replacer after describing circular references", function() {
+ var obj = {name: "Alice"}
+ obj.self = obj
+
+ var replacer = Sinon.spy(bangString)
+ var json = stringify(obj, replacer, 2)
+ json.must.eql(jsonify({name: "Alice!", self: "[Circular ~]!"}))
+
+ replacer.callCount.must.equal(3)
+ replacer.args[0][0].must.equal("")
+ replacer.args[0][1].must.equal(obj)
+ replacer.thisValues[1].must.equal(obj)
+ replacer.args[1][0].must.equal("name")
+ replacer.args[1][1].must.equal("Alice")
+ replacer.thisValues[2].must.equal(obj)
+ replacer.args[2][0].must.equal("self")
+ replacer.args[2][1].must.equal("[Circular ~]")
+ })
+
+ it("must call given decycler and use its output for nested objects",
+ function() {
+ var obj = {}
+ obj.a = obj
+ obj.b = {self: obj}
+
+ var decycle = Sinon.spy(function() { return decycle.callCount })
+ var json = stringify(obj, null, 2, decycle)
+ json.must.eql(jsonify({a: 1, b: {self: 2}}))
+
+ decycle.callCount.must.equal(2)
+ decycle.args[0][0].must.equal("a")
+ decycle.args[0][1].must.equal(obj)
+ decycle.args[1][0].must.equal("self")
+ decycle.args[1][1].must.equal(obj)
+ })
+
+ it("must use decycler's output when it returned null", function() {
+ var obj = {a: "b"}
+ obj.self = obj
+ obj.selves = [obj, obj]
+
+ function decycle() { return null }
+ stringify(obj, null, 2, decycle).must.eql(jsonify({
+ a: "b",
+ self: null,
+ selves: [null, null]
+ }))
+ })
+
+ it("must use decycler's output when it returned undefined", function() {
+ var obj = {a: "b"}
+ obj.self = obj
+ obj.selves = [obj, obj]
+
+ function decycle() {}
+ stringify(obj, null, 2, decycle).must.eql(jsonify({
+ a: "b",
+ selves: [null, null]
+ }))
+ })
+
+ it("must throw given a decycler that returns a cycle", function() {
+ var obj = {}
+ obj.self = obj
+ var err
+ function identity(key, value) { return value }
+ try { stringify(obj, null, 2, identity) } catch (ex) { err = ex }
+ err.must.be.an.instanceof(TypeError)
+ })
+
+ describe(".getSerialize", function() {
+ it("must stringify circular objects", function() {
+ var obj = {a: "b"}
+ obj.circularRef = obj
+ obj.list = [obj, obj]
+
+ var json = JSON.stringify(obj, stringify.getSerialize(), 2)
+ json.must.eql(jsonify({
+ "a": "b",
+ "circularRef": "[Circular ~]",
+ "list": ["[Circular ~]", "[Circular ~]"]
+ }))
+ })
+
+ // This is the behavior as of Mar 3, 2015.
+ // The serializer function keeps state inside the returned function and
+ // so far I'm not sure how to not do that. JSON.stringify's replacer is not
+ // called _after_ serialization.
+ xit("must return a function that could be called twice", function() {
+ var obj = {name: "Alice"}
+ obj.self = obj
+
+ var json
+ var serializer = stringify.getSerialize()
+
+ json = JSON.stringify(obj, serializer, 2)
+ json.must.eql(jsonify({name: "Alice", self: "[Circular ~]"}))
+
+ json = JSON.stringify(obj, serializer, 2)
+ json.must.eql(jsonify({name: "Alice", self: "[Circular ~]"}))
+ })
+ })
+})
+
+function bangString(key, value) {
+ return typeof value == "string" ? value + "!" : value
+}
diff --git a/familyark/app/node_modules/jsprim/CHANGES.md b/familyark/app/node_modules/jsprim/CHANGES.md
new file mode 100644
index 0000000..c52d39d
--- /dev/null
+++ b/familyark/app/node_modules/jsprim/CHANGES.md
@@ -0,0 +1,49 @@
+# Changelog
+
+## not yet released
+
+None yet.
+
+## v1.4.1 (2017-08-02)
+
+* #21 Update verror dep
+* #22 Update extsprintf dependency
+* #23 update contribution guidelines
+
+## v1.4.0 (2017-03-13)
+
+* #7 Add parseInteger() function for safer number parsing
+
+## v1.3.1 (2016-09-12)
+
+* #13 Incompatible with webpack
+
+## v1.3.0 (2016-06-22)
+
+* #14 add safer version of hasOwnProperty()
+* #15 forEachKey() should ignore inherited properties
+
+## v1.2.2 (2015-10-15)
+
+* #11 NPM package shouldn't include any code that does `require('JSV')`
+* #12 jsl.node.conf missing definition for "module"
+
+## v1.2.1 (2015-10-14)
+
+* #8 odd date parsing behaviour
+
+## v1.2.0 (2015-10-13)
+
+* #9 want function for returning RFC1123 dates
+
+## v1.1.0 (2015-09-02)
+
+* #6 a new suite of hrtime manipulation routines: `hrtimeAdd()`,
+ `hrtimeAccum()`, `hrtimeNanosec()`, `hrtimeMicrosec()` and
+ `hrtimeMillisec()`.
+
+## v1.0.0 (2015-09-01)
+
+First tracked release. Includes everything in previous releases, plus:
+
+* #4 want function for merging objects
diff --git a/familyark/app/node_modules/jsprim/CONTRIBUTING.md b/familyark/app/node_modules/jsprim/CONTRIBUTING.md
new file mode 100644
index 0000000..750cef8
--- /dev/null
+++ b/familyark/app/node_modules/jsprim/CONTRIBUTING.md
@@ -0,0 +1,19 @@
+# Contributing
+
+This repository uses [cr.joyent.us](https://cr.joyent.us) (Gerrit) for new
+changes. Anyone can submit changes. To get started, see the [cr.joyent.us user
+guide](https://github.com/joyent/joyent-gerrit/blob/master/docs/user/README.md).
+This repo does not use GitHub pull requests.
+
+See the [Joyent Engineering
+Guidelines](https://github.com/joyent/eng/blob/master/docs/index.md) for general
+best practices expected in this repository.
+
+Contributions should be "make prepush" clean. The "prepush" target runs the
+"check" target, which requires these separate tools:
+
+* https://github.com/davepacheco/jsstyle
+* https://github.com/davepacheco/javascriptlint
+
+If you're changing something non-trivial or user-facing, you may want to submit
+an issue first.
diff --git a/familyark/app/node_modules/jsprim/LICENSE b/familyark/app/node_modules/jsprim/LICENSE
new file mode 100644
index 0000000..cbc0bb3
--- /dev/null
+++ b/familyark/app/node_modules/jsprim/LICENSE
@@ -0,0 +1,19 @@
+Copyright (c) 2012, Joyent, Inc. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE
diff --git a/familyark/app/node_modules/jsprim/README.md b/familyark/app/node_modules/jsprim/README.md
new file mode 100644
index 0000000..b3f28a4
--- /dev/null
+++ b/familyark/app/node_modules/jsprim/README.md
@@ -0,0 +1,287 @@
+# jsprim: utilities for primitive JavaScript types
+
+This module provides miscellaneous facilities for working with strings,
+numbers, dates, and objects and arrays of these basic types.
+
+
+### deepCopy(obj)
+
+Creates a deep copy of a primitive type, object, or array of primitive types.
+
+
+### deepEqual(obj1, obj2)
+
+Returns whether two objects are equal.
+
+
+### isEmpty(obj)
+
+Returns true if the given object has no properties and false otherwise. This
+is O(1) (unlike `Object.keys(obj).length === 0`, which is O(N)).
+
+### hasKey(obj, key)
+
+Returns true if the given object has an enumerable, non-inherited property
+called `key`. [For information on enumerability and ownership of properties, see
+the MDN
+documentation.](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Enumerability_and_ownership_of_properties)
+
+### forEachKey(obj, callback)
+
+Like Array.forEach, but iterates enumerable, owned properties of an object
+rather than elements of an array. Equivalent to:
+
+ for (var key in obj) {
+ if (Object.prototype.hasOwnProperty.call(obj, key)) {
+ callback(key, obj[key]);
+ }
+ }
+
+
+### flattenObject(obj, depth)
+
+Flattens an object up to a given level of nesting, returning an array of arrays
+of length "depth + 1", where the first "depth" elements correspond to flattened
+columns and the last element contains the remaining object. For example:
+
+ flattenObject({
+ 'I': {
+ 'A': {
+ 'i': {
+ 'datum1': [ 1, 2 ],
+ 'datum2': [ 3, 4 ]
+ },
+ 'ii': {
+ 'datum1': [ 3, 4 ]
+ }
+ },
+ 'B': {
+ 'i': {
+ 'datum1': [ 5, 6 ]
+ },
+ 'ii': {
+ 'datum1': [ 7, 8 ],
+ 'datum2': [ 3, 4 ],
+ },
+ 'iii': {
+ }
+ }
+ },
+ 'II': {
+ 'A': {
+ 'i': {
+ 'datum1': [ 1, 2 ],
+ 'datum2': [ 3, 4 ]
+ }
+ }
+ }
+ }, 3)
+
+becomes:
+
+ [
+ [ 'I', 'A', 'i', { 'datum1': [ 1, 2 ], 'datum2': [ 3, 4 ] } ],
+ [ 'I', 'A', 'ii', { 'datum1': [ 3, 4 ] } ],
+ [ 'I', 'B', 'i', { 'datum1': [ 5, 6 ] } ],
+ [ 'I', 'B', 'ii', { 'datum1': [ 7, 8 ], 'datum2': [ 3, 4 ] } ],
+ [ 'I', 'B', 'iii', {} ],
+ [ 'II', 'A', 'i', { 'datum1': [ 1, 2 ], 'datum2': [ 3, 4 ] } ]
+ ]
+
+This function is strict: "depth" must be a non-negative integer and "obj" must
+be a non-null object with at least "depth" levels of nesting under all keys.
+
+
+### flattenIter(obj, depth, func)
+
+This is similar to `flattenObject` except that instead of returning an array,
+this function invokes `func(entry)` for each `entry` in the array that
+`flattenObject` would return. `flattenIter(obj, depth, func)` is logically
+equivalent to `flattenObject(obj, depth).forEach(func)`. Importantly, this
+version never constructs the full array. Its memory usage is O(depth) rather
+than O(n) (where `n` is the number of flattened elements).
+
+There's another difference between `flattenObject` and `flattenIter` that's
+related to the special case where `depth === 0`. In this case, `flattenObject`
+omits the array wrapping `obj` (which is regrettable).
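+
+For example (an illustrative sketch):
+
+ flattenIter({ 'I': { 'A': 5, 'B': 6 } }, 2, function (entry) {
+ console.log(entry);
+ });
+
+invokes the callback with `[ 'I', 'A', 5 ]` and then `[ 'I', 'B', 6 ]`, without
+ever building the full flattened array.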
+
+
+### pluck(obj, key)
+
+Fetch nested property "key" from object "obj", traversing objects as needed.
+For example, `pluck(obj, "foo.bar.baz")` is roughly equivalent to
+`obj.foo.bar.baz`, except that:
+
+1. If traversal fails, the resulting value is undefined, and no error is
+ thrown. For example, `pluck({}, "foo.bar")` is just undefined.
+2. If "obj" has property "key" directly (without traversing), the
+ corresponding property is returned. For example,
+ `pluck({ 'foo.bar': 1 }, 'foo.bar')` is 1, not undefined. This is also
+ true recursively, so `pluck({ 'a': { 'foo.bar': 1 } }, 'a.foo.bar')` is
+ also 1, not undefined.
+
+
+### randElt(array)
+
+Returns an element from "array" selected uniformly at random. If "array" is
+empty, throws an Error.
+
+
+### startsWith(str, prefix)
+
+Returns true if the given string starts with the given prefix and false
+otherwise.
+
+
+### endsWith(str, suffix)
+
+Returns true if the given string ends with the given suffix and false
+otherwise.
+
+
+### parseInteger(str, options)
+
+Parses the contents of `str` (a string) as an integer. On success, the integer
+value is returned (as a number). On failure, an error is **returned** describing
+why parsing failed.
+
+By default, leading and trailing whitespace characters are not allowed, nor are
+trailing characters that are not part of the numeric representation. This
+behaviour can be toggled by using the options below. The empty string (`''`) is
+not considered valid input. If the return value cannot be precisely represented
+as a number (i.e., is smaller than `Number.MIN_SAFE_INTEGER` or larger than
+`Number.MAX_SAFE_INTEGER`), an error is returned. Additionally, the string
+`'-0'` will be parsed as the integer `0`, instead of as the IEEE floating point
+value `-0`.
+
+This function accepts both upper and lowercase characters for digits, similar to
+`parseInt()`, `Number()`, and [strtol(3C)](https://illumos.org/man/3C/strtol).
+
+The following may be specified in `options`:
+
+Option | Type | Default | Meaning
+------------------ | ------- | ------- | ---------------------------
+base | number | 10 | numeric base (radix) to use, in the range 2 to 36
+allowSign | boolean | true | whether to interpret any leading `+` (positive) and `-` (negative) characters
+allowImprecise | boolean | false | whether to accept values that may have lost precision (past `MAX_SAFE_INTEGER` or below `MIN_SAFE_INTEGER`)
+allowPrefix | boolean | false | whether to interpret the prefixes `0b` (base 2), `0o` (base 8), `0t` (base 10), or `0x` (base 16)
+allowTrailing | boolean | false | whether to ignore trailing characters
+trimWhitespace | boolean | false | whether to trim any leading or trailing whitespace/line terminators
+leadingZeroIsOctal | boolean | false | whether a leading zero indicates octal
+
+Note that if `base` is unspecified, and `allowPrefix` or `leadingZeroIsOctal`
+are, then the leading characters can change the default base from 10. If `base`
+is explicitly specified and `allowPrefix` is true, then the prefix will only be
+accepted if it matches the specified base. `base` and `leadingZeroIsOctal`
+cannot be used together.
+
+**Context:** It's tricky to parse integers with JavaScript's built-in facilities
+for several reasons:
+
+- `parseInt()` and `Number()` by default allow the base to be specified in the
+ input string by a prefix (e.g., `0x` for hex).
+- `parseInt()` allows trailing nonnumeric characters.
+- `Number(str)` returns 0 when `str` is the empty string (`''`).
+- Both functions return incorrect values when the input string represents a
+ valid integer outside the range of integers that can be represented precisely.
+ Specifically, `parseInt('9007199254740993')` returns 9007199254740992.
+- Both functions always accept `-` and `+` signs before the digit.
+- Some older JavaScript engines always interpret a leading 0 as indicating
+ octal, which can be surprising when parsing input from users who expect a
+ leading zero to be insignificant.
+
+While each of these may be desirable in some contexts, there are also times when
+none of them are wanted. `parseInteger()` grants greater control over what
+input's permissible.
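+
+For example (an illustrative sketch of the options described above):
+
+ parseInteger('123')                             /* returns 123 */
+ parseInteger('0x1f', { allowPrefix: true })     /* returns 31 */
+ parseInteger('123abc')                          /* returns an Error */
+ parseInteger('123abc', { allowTrailing: true }) /* returns 123 */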
+
+### iso8601(date)
+
+Converts a Date object to an ISO8601 date string of the form
+"YYYY-MM-DDTHH:MM:SS.sssZ". This format is not customizable.
+
+
+### parseDateTime(str)
+
+Parses a date expressed as a string, as either a number of milliseconds since
+the epoch or any string format that Date accepts, giving preference to the
+former where these two sets overlap (e.g., strings containing small numbers).
+
+
+### hrtimeDiff(timeA, timeB)
+
+Given two hrtime readings (as from Node's `process.hrtime()`), where timeA is
+later than timeB, compute the difference and return that as an hrtime. It is
+illegal to invoke this for a pair of times where timeB is newer than timeA.
+
+### hrtimeAdd(timeA, timeB)
+
+Add two hrtime intervals (as from Node's `process.hrtime()`), returning a new
+hrtime interval array. This function does not modify either input argument.
+
+
+### hrtimeAccum(timeA, timeB)
+
+Add two hrtime intervals (as from Node's `process.hrtime()`), storing the
+result in `timeA`. This function overwrites (and returns) the first argument
+passed in.
+
+
+### hrtimeNanosec(timeA), hrtimeMicrosec(timeA), hrtimeMillisec(timeA)
+
+This suite of functions converts a hrtime interval (as from Node's
+`process.hrtime()`) into a scalar number of nanoseconds, microseconds or
+milliseconds. Results are truncated, as with `Math.floor()`.
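+
+For example (an illustrative sketch of timing a block of code):
+
+ var start = process.hrtime();
+ /* ... do some work ... */
+ var elapsed = hrtimeDiff(process.hrtime(), start);
+ console.log(hrtimeMillisec(elapsed)); /* elapsed time in milliseconds */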
+
+
+### validateJsonObject(schema, object)
+
+Uses JSON validation (via JSV) to validate the given object against the given
+schema. On success, returns null. On failure, *returns* (does not throw) a
+useful Error object.
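+
+For example (an illustrative sketch; the exact message text comes from the
+underlying validator):
+
+ var schema = { 'type': 'object', 'properties': {
+ 'count': { 'type': 'number', 'required': true } } };
+ validateJsonObject(schema, { 'count': 3 }) /* returns null */
+ validateJsonObject(schema, {}) /* returns an Error ('property "count": ...') */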
+
+
+### extraProperties(object, allowed)
+
+Check an object for unexpected properties. Accepts the object to check, and an
+array of allowed property name strings. If extra properties are detected, an
+array of extra property names is returned. If no properties other than those
+in the allowed list are present on the object, the returned array will be of
+zero length.
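+
+For example (an illustrative sketch):
+
+ extraProperties({ 'host': 'db1', 'port': 123, 'debug': true }, [ 'host', 'port' ])
+
+returns:
+
+ [ 'debug' ]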
+
+### mergeObjects(provided, overrides, defaults)
+
+Merge properties from objects "provided", "overrides", and "defaults". The
+intended use case is for functions that accept named arguments in an "args"
+object, but want to provide some default values and override other values. In
+that case, "provided" is what the caller specified, "overrides" are what the
+function wants to override, and "defaults" contains default values.
+
+The function starts with the values in "defaults", overrides them with the
+values in "provided", and then overrides those with the values in "overrides".
+For convenience, any of these objects may be falsey, in which case they will be
+ignored. The input objects are never modified, but properties in the returned
+object are not deep-copied.
+
+For example:
+
+ mergeObjects(undefined, { 'objectMode': true }, { 'highWaterMark': 0 })
+
+returns:
+
+ { 'objectMode': true, 'highWaterMark': 0 }
+
+For another example:
+
+ mergeObjects(
+ { 'highWaterMark': 16, 'objectMode': 7 }, /* from caller */
+ { 'objectMode': true }, /* overrides */
+ { 'highWaterMark': 0 }); /* default */
+
+returns:
+
+ { 'objectMode': true, 'highWaterMark': 16 }
+
+
+# Contributing
+
+See separate [contribution guidelines](CONTRIBUTING.md).
diff --git a/familyark/app/node_modules/jsprim/lib/jsprim.js b/familyark/app/node_modules/jsprim/lib/jsprim.js
new file mode 100644
index 0000000..f7d0d81
--- /dev/null
+++ b/familyark/app/node_modules/jsprim/lib/jsprim.js
@@ -0,0 +1,735 @@
+/*
+ * lib/jsprim.js: utilities for primitive JavaScript types
+ */
+
+var mod_assert = require('assert-plus');
+var mod_util = require('util');
+
+var mod_extsprintf = require('extsprintf');
+var mod_verror = require('verror');
+var mod_jsonschema = require('json-schema');
+
+/*
+ * Public interface
+ */
+exports.deepCopy = deepCopy;
+exports.deepEqual = deepEqual;
+exports.isEmpty = isEmpty;
+exports.hasKey = hasKey;
+exports.forEachKey = forEachKey;
+exports.pluck = pluck;
+exports.flattenObject = flattenObject;
+exports.flattenIter = flattenIter;
+exports.validateJsonObject = validateJsonObjectJS;
+exports.validateJsonObjectJS = validateJsonObjectJS;
+exports.randElt = randElt;
+exports.extraProperties = extraProperties;
+exports.mergeObjects = mergeObjects;
+
+exports.startsWith = startsWith;
+exports.endsWith = endsWith;
+
+exports.parseInteger = parseInteger;
+
+exports.iso8601 = iso8601;
+exports.rfc1123 = rfc1123;
+exports.parseDateTime = parseDateTime;
+
+exports.hrtimediff = hrtimeDiff;
+exports.hrtimeDiff = hrtimeDiff;
+exports.hrtimeAccum = hrtimeAccum;
+exports.hrtimeAdd = hrtimeAdd;
+exports.hrtimeNanosec = hrtimeNanosec;
+exports.hrtimeMicrosec = hrtimeMicrosec;
+exports.hrtimeMillisec = hrtimeMillisec;
+
+
+/*
+ * Deep copy an acyclic *basic* Javascript object. This only handles basic
+ * scalars (strings, numbers, booleans) and arbitrarily deep arrays and objects
+ * containing these. This does *not* handle instances of other classes.
+ */
+function deepCopy(obj)
+{
+ var ret, key;
+ var marker = '__deepCopy';
+
+ if (obj && obj[marker])
+ throw (new Error('attempted deep copy of cyclic object'));
+
+ if (obj && obj.constructor == Object) {
+ ret = {};
+ obj[marker] = true;
+
+ for (key in obj) {
+ if (key == marker)
+ continue;
+
+ ret[key] = deepCopy(obj[key]);
+ }
+
+ delete (obj[marker]);
+ return (ret);
+ }
+
+ if (obj && obj.constructor == Array) {
+ ret = [];
+ obj[marker] = true;
+
+ for (key = 0; key < obj.length; key++)
+ ret.push(deepCopy(obj[key]));
+
+ delete (obj[marker]);
+ return (ret);
+ }
+
+ /*
+ * It must be a primitive type -- just return it.
+ */
+ return (obj);
+}
+
+function deepEqual(obj1, obj2)
+{
+ if (typeof (obj1) != typeof (obj2))
+ return (false);
+
+ if (obj1 === null || obj2 === null || typeof (obj1) != 'object')
+ return (obj1 === obj2);
+
+ if (obj1.constructor != obj2.constructor)
+ return (false);
+
+ var k;
+ for (k in obj1) {
+ if (!obj2.hasOwnProperty(k))
+ return (false);
+
+ if (!deepEqual(obj1[k], obj2[k]))
+ return (false);
+ }
+
+ for (k in obj2) {
+ if (!obj1.hasOwnProperty(k))
+ return (false);
+ }
+
+ return (true);
+}
+
+function isEmpty(obj)
+{
+ var key;
+ for (key in obj)
+ return (false);
+ return (true);
+}
+
+function hasKey(obj, key)
+{
+ mod_assert.equal(typeof (key), 'string');
+ return (Object.prototype.hasOwnProperty.call(obj, key));
+}
+
+function forEachKey(obj, callback)
+{
+ for (var key in obj) {
+ if (hasKey(obj, key)) {
+ callback(key, obj[key]);
+ }
+ }
+}
+
+function pluck(obj, key)
+{
+ mod_assert.equal(typeof (key), 'string');
+ return (pluckv(obj, key));
+}
+
+function pluckv(obj, key)
+{
+ if (obj === null || typeof (obj) !== 'object')
+ return (undefined);
+
+ if (obj.hasOwnProperty(key))
+ return (obj[key]);
+
+ var i = key.indexOf('.');
+ if (i == -1)
+ return (undefined);
+
+ var key1 = key.substr(0, i);
+ if (!obj.hasOwnProperty(key1))
+ return (undefined);
+
+ return (pluckv(obj[key1], key.substr(i + 1)));
+}
+
+/*
+ * Invoke callback(row) for each entry in the array that would be returned by
+ * flattenObject(data, depth). This is just like flattenObject(data,
+ * depth).forEach(callback), except that the intermediate array is never
+ * created.
+ */
+function flattenIter(data, depth, callback)
+{
+ doFlattenIter(data, depth, [], callback);
+}
+
+function doFlattenIter(data, depth, accum, callback)
+{
+ var each;
+ var key;
+
+ if (depth === 0) {
+ each = accum.slice(0);
+ each.push(data);
+ callback(each);
+ return;
+ }
+
+ mod_assert.ok(data !== null);
+ mod_assert.equal(typeof (data), 'object');
+ mod_assert.equal(typeof (depth), 'number');
+ mod_assert.ok(depth >= 0);
+
+ for (key in data) {
+ each = accum.slice(0);
+ each.push(key);
+ doFlattenIter(data[key], depth - 1, each, callback);
+ }
+}
+
+function flattenObject(data, depth)
+{
+ if (depth === 0)
+ return ([ data ]);
+
+ mod_assert.ok(data !== null);
+ mod_assert.equal(typeof (data), 'object');
+ mod_assert.equal(typeof (depth), 'number');
+ mod_assert.ok(depth >= 0);
+
+ var rv = [];
+ var key;
+
+ for (key in data) {
+ flattenObject(data[key], depth - 1).forEach(function (p) {
+ rv.push([ key ].concat(p));
+ });
+ }
+
+ return (rv);
+}
+
+function startsWith(str, prefix)
+{
+ return (str.substr(0, prefix.length) == prefix);
+}
+
+function endsWith(str, suffix)
+{
+ return (str.substr(
+ str.length - suffix.length, suffix.length) == suffix);
+}
+
+function iso8601(d)
+{
+ if (typeof (d) == 'number')
+ d = new Date(d);
+ mod_assert.ok(d.constructor === Date);
+ return (mod_extsprintf.sprintf('%4d-%02d-%02dT%02d:%02d:%02d.%03dZ',
+ d.getUTCFullYear(), d.getUTCMonth() + 1, d.getUTCDate(),
+ d.getUTCHours(), d.getUTCMinutes(), d.getUTCSeconds(),
+ d.getUTCMilliseconds()));
+}
+
+var RFC1123_MONTHS = [
+ 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
+ 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'];
+var RFC1123_DAYS = [
+ 'Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'];
+
+function rfc1123(date) {
+ return (mod_extsprintf.sprintf('%s, %02d %s %04d %02d:%02d:%02d GMT',
+ RFC1123_DAYS[date.getUTCDay()], date.getUTCDate(),
+ RFC1123_MONTHS[date.getUTCMonth()], date.getUTCFullYear(),
+ date.getUTCHours(), date.getUTCMinutes(),
+ date.getUTCSeconds()));
+}
+
+/*
+ * Parses a date expressed as a string, as either a number of milliseconds since
+ * the epoch or any string format that Date accepts, giving preference to the
+ * former where these two sets overlap (e.g., small numbers).
+ */
+function parseDateTime(str)
+{
+ /*
+ * This is irritatingly implicit, but significantly more concise than
+ * alternatives. The "+str" will convert a string containing only a
+ * number directly to a Number, or NaN for other strings. Thus, if the
+ * conversion succeeds, we use it (this is the milliseconds-since-epoch
+ * case). Otherwise, we pass the string directly to the Date
+ * constructor to parse.
+ */
+ var numeric = +str;
+ if (!isNaN(numeric)) {
+ return (new Date(numeric));
+ } else {
+ return (new Date(str));
+ }
+}
+
+
+/*
+ * Number.*_SAFE_INTEGER isn't present before node v0.12, so we hardcode
+ * the ES6 definitions here, while allowing for them to someday be higher.
+ */
+var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || 9007199254740991;
+var MIN_SAFE_INTEGER = Number.MIN_SAFE_INTEGER || -9007199254740991;
+
+
+/*
+ * Default options for parseInteger().
+ */
+var PI_DEFAULTS = {
+ base: 10,
+ allowSign: true,
+ allowPrefix: false,
+ allowTrailing: false,
+ allowImprecise: false,
+ trimWhitespace: false,
+ leadingZeroIsOctal: false
+};
+
+var CP_0 = 0x30;
+var CP_9 = 0x39;
+
+var CP_A = 0x41;
+var CP_B = 0x42;
+var CP_O = 0x4f;
+var CP_T = 0x54;
+var CP_X = 0x58;
+var CP_Z = 0x5a;
+
+var CP_a = 0x61;
+var CP_b = 0x62;
+var CP_o = 0x6f;
+var CP_t = 0x74;
+var CP_x = 0x78;
+var CP_z = 0x7a;
+
+var PI_CONV_DEC = 0x30;
+var PI_CONV_UC = 0x37;
+var PI_CONV_LC = 0x57;
+
+
+/*
+ * A stricter version of parseInt() that provides options for changing what
+ * is an acceptable string (for example, disallowing trailing characters).
+ */
+function parseInteger(str, uopts)
+{
+ mod_assert.string(str, 'str');
+ mod_assert.optionalObject(uopts, 'options');
+
+ var baseOverride = false;
+ var options = PI_DEFAULTS;
+
+ if (uopts) {
+ baseOverride = hasKey(uopts, 'base');
+ options = mergeObjects(options, uopts);
+ mod_assert.number(options.base, 'options.base');
+ mod_assert.ok(options.base >= 2, 'options.base >= 2');
+ mod_assert.ok(options.base <= 36, 'options.base <= 36');
+ mod_assert.bool(options.allowSign, 'options.allowSign');
+ mod_assert.bool(options.allowPrefix, 'options.allowPrefix');
+ mod_assert.bool(options.allowTrailing,
+ 'options.allowTrailing');
+ mod_assert.bool(options.allowImprecise,
+ 'options.allowImprecise');
+ mod_assert.bool(options.trimWhitespace,
+ 'options.trimWhitespace');
+ mod_assert.bool(options.leadingZeroIsOctal,
+ 'options.leadingZeroIsOctal');
+
+ if (options.leadingZeroIsOctal) {
+ mod_assert.ok(!baseOverride,
+ '"base" and "leadingZeroIsOctal" are ' +
+ 'mutually exclusive');
+ }
+ }
+
+ var c;
+ var pbase = -1;
+ var base = options.base;
+ var start;
+ var mult = 1;
+ var value = 0;
+ var idx = 0;
+ var len = str.length;
+
+ /* Trim any whitespace on the left side. */
+ if (options.trimWhitespace) {
+ while (idx < len && isSpace(str.charCodeAt(idx))) {
+ ++idx;
+ }
+ }
+
+ /* Check the number for a leading sign. */
+ if (options.allowSign) {
+ if (str[idx] === '-') {
+ idx += 1;
+ mult = -1;
+ } else if (str[idx] === '+') {
+ idx += 1;
+ }
+ }
+
+ /* Parse the base-indicating prefix if there is one. */
+ if (str[idx] === '0') {
+ if (options.allowPrefix) {
+ pbase = prefixToBase(str.charCodeAt(idx + 1));
+ if (pbase !== -1 && (!baseOverride || pbase === base)) {
+ base = pbase;
+ idx += 2;
+ }
+ }
+
+ if (pbase === -1 && options.leadingZeroIsOctal) {
+ base = 8;
+ }
+ }
+
+ /* Parse the actual digits. */
+ for (start = idx; idx < len; ++idx) {
+ c = translateDigit(str.charCodeAt(idx));
+ if (c !== -1 && c < base) {
+ value *= base;
+ value += c;
+ } else {
+ break;
+ }
+ }
+
+ /* If we didn't parse any digits, we have an invalid number. */
+ if (start === idx) {
+ return (new Error('invalid number: ' + JSON.stringify(str)));
+ }
+
+ /* Trim any whitespace on the right side. */
+ if (options.trimWhitespace) {
+ while (idx < len && isSpace(str.charCodeAt(idx))) {
+ ++idx;
+ }
+ }
+
+ /* Check for trailing characters. */
+ if (idx < len && !options.allowTrailing) {
+ return (new Error('trailing characters after number: ' +
+ JSON.stringify(str.slice(idx))));
+ }
+
+ /* If our value is 0, we return now, to avoid returning -0. */
+ if (value === 0) {
+ return (0);
+ }
+
+ /* Calculate our final value. */
+ var result = value * mult;
+
+ /*
+ * If the string represents a value that cannot be precisely represented
+ * by JavaScript, then we want to check that:
+ *
+ * - We never increased the value past MAX_SAFE_INTEGER
+ * - We don't make the result negative and below MIN_SAFE_INTEGER
+ *
+ * Because we only ever increment the value during parsing, there's no
+ * chance of moving past MAX_SAFE_INTEGER and then dropping below it
+ * again, losing precision in the process. This means that we only need
+ * to do our checks here, at the end.
+ */
+ if (!options.allowImprecise &&
+ (value > MAX_SAFE_INTEGER || result < MIN_SAFE_INTEGER)) {
+ return (new Error('number is outside of the supported range: ' +
+ JSON.stringify(str.slice(start, idx))));
+ }
+
+ return (result);
+}
+
+
+/*
+ * Interpret a character code as a base-36 digit.
+ */
+function translateDigit(d)
+{
+ if (d >= CP_0 && d <= CP_9) {
+ /* '0' to '9' -> 0 to 9 */
+ return (d - PI_CONV_DEC);
+ } else if (d >= CP_A && d <= CP_Z) {
+ /* 'A' - 'Z' -> 10 to 35 */
+ return (d - PI_CONV_UC);
+ } else if (d >= CP_a && d <= CP_z) {
+ /* 'a' - 'z' -> 10 to 35 */
+ return (d - PI_CONV_LC);
+ } else {
+ /* Invalid character code */
+ return (-1);
+ }
+}
+
+
+/*
+ * Test if a value matches the ECMAScript definition of trimmable whitespace.
+ */
+function isSpace(c)
+{
+ return (c === 0x20) ||
+ (c >= 0x0009 && c <= 0x000d) ||
+ (c === 0x00a0) ||
+ (c === 0x1680) ||
+ (c === 0x180e) ||
+ (c >= 0x2000 && c <= 0x200a) ||
+ (c === 0x2028) ||
+ (c === 0x2029) ||
+ (c === 0x202f) ||
+ (c === 0x205f) ||
+ (c === 0x3000) ||
+ (c === 0xfeff);
+}
+
+
+/*
+ * Determine which base a character indicates (e.g., 'x' indicates hex).
+ */
+function prefixToBase(c)
+{
+ if (c === CP_b || c === CP_B) {
+ /* 0b/0B (binary) */
+ return (2);
+ } else if (c === CP_o || c === CP_O) {
+ /* 0o/0O (octal) */
+ return (8);
+ } else if (c === CP_t || c === CP_T) {
+ /* 0t/0T (decimal) */
+ return (10);
+ } else if (c === CP_x || c === CP_X) {
+ /* 0x/0X (hexadecimal) */
+ return (16);
+ } else {
+ /* Not a meaningful character */
+ return (-1);
+ }
+}
+
+
+function validateJsonObjectJS(schema, input)
+{
+ var report = mod_jsonschema.validate(input, schema);
+
+ if (report.errors.length === 0)
+ return (null);
+
+ /* Currently, we only do anything useful with the first error. */
+ var error = report.errors[0];
+
+ /* The failed property is given by a URI with an irrelevant prefix. */
+ var propname = error['property'];
+ var reason = error['message'].toLowerCase();
+ var i, j;
+
+ /*
+ * There's at least one case where the property error message is
+ * confusing at best. We work around this here.
+ */
+ if ((i = reason.indexOf('the property ')) != -1 &&
+ (j = reason.indexOf(' is not defined in the schema and the ' +
+ 'schema does not allow additional properties')) != -1) {
+ i += 'the property '.length;
+ if (propname === '')
+ propname = reason.substr(i, j - i);
+ else
+ propname = propname + '.' + reason.substr(i, j - i);
+
+ reason = 'unsupported property';
+ }
+
+ var rv = new mod_verror.VError('property "%s": %s', propname, reason);
+ rv.jsv_details = error;
+ return (rv);
+}
+
+function randElt(arr)
+{
+ mod_assert.ok(Array.isArray(arr) && arr.length > 0,
+ 'randElt argument must be a non-empty array');
+
+ return (arr[Math.floor(Math.random() * arr.length)]);
+}
+
+function assertHrtime(a)
+{
+ mod_assert.ok(a[0] >= 0 && a[1] >= 0,
+ 'negative numbers not allowed in hrtimes');
+ mod_assert.ok(a[1] < 1e9, 'nanoseconds column overflow');
+}
+
+/*
+ * Compute the time elapsed between hrtime readings A and B, where A is later
+ * than B. hrtime readings come from Node's process.hrtime(). There is no
+ * defined way to represent negative deltas, so it's illegal to diff B from A
+ * where the time denoted by B is later than the time denoted by A. If this
+ * becomes valuable, we can define a representation and extend the
+ * implementation to support it.
+ */
+function hrtimeDiff(a, b)
+{
+ assertHrtime(a);
+ assertHrtime(b);
+ mod_assert.ok(a[0] > b[0] || (a[0] == b[0] && a[1] >= b[1]),
+ 'negative differences not allowed');
+
+ var rv = [ a[0] - b[0], 0 ];
+
+ if (a[1] >= b[1]) {
+ rv[1] = a[1] - b[1];
+ } else {
+ rv[0]--;
+ rv[1] = 1e9 - (b[1] - a[1]);
+ }
+
+ return (rv);
+}
+
+/*
+ * Convert a hrtime reading from the array format returned by Node's
+ * process.hrtime() into a scalar number of nanoseconds.
+ */
+function hrtimeNanosec(a)
+{
+ assertHrtime(a);
+
+ return (Math.floor(a[0] * 1e9 + a[1]));
+}
+
+/*
+ * Convert a hrtime reading from the array format returned by Node's
+ * process.hrtime() into a scalar number of microseconds.
+ */
+function hrtimeMicrosec(a)
+{
+ assertHrtime(a);
+
+ return (Math.floor(a[0] * 1e6 + a[1] / 1e3));
+}
+
+/*
+ * Convert a hrtime reading from the array format returned by Node's
+ * process.hrtime() into a scalar number of milliseconds.
+ */
+function hrtimeMillisec(a)
+{
+ assertHrtime(a);
+
+ return (Math.floor(a[0] * 1e3 + a[1] / 1e6));
+}
+
+/*
+ * Add two hrtime readings A and B, overwriting A with the result of the
+ * addition. This function is useful for accumulating several hrtime intervals
+ * into a counter. Returns A.
+ */
+function hrtimeAccum(a, b)
+{
+ assertHrtime(a);
+ assertHrtime(b);
+
+ /*
+ * Accumulate the nanosecond component.
+ */
+ a[1] += b[1];
+ if (a[1] >= 1e9) {
+ /*
+ * The nanosecond component overflowed, so carry to the seconds
+ * field.
+ */
+ a[0]++;
+ a[1] -= 1e9;
+ }
+
+ /*
+ * Accumulate the seconds component.
+ */
+ a[0] += b[0];
+
+ return (a);
+}
+
+/*
+ * Add two hrtime readings A and B, returning the result as a new hrtime array.
+ * Does not modify either input argument.
+ */
+function hrtimeAdd(a, b)
+{
+ assertHrtime(a);
+
+ var rv = [ a[0], a[1] ];
+
+ return (hrtimeAccum(rv, b));
+}
+
+
+/*
+ * Check an object for unexpected properties. Accepts the object to check, and
+ * an array of allowed property names (strings). Returns an array of key names
+ * that were found on the object, but did not appear in the list of allowed
+ * properties. If no properties were found, the returned array will be of
+ * zero length.
+ */
+function extraProperties(obj, allowed)
+{
+ mod_assert.ok(typeof (obj) === 'object' && obj !== null,
+ 'obj argument must be a non-null object');
+ mod_assert.ok(Array.isArray(allowed),
+ 'allowed argument must be an array of strings');
+ for (var i = 0; i < allowed.length; i++) {
+ mod_assert.ok(typeof (allowed[i]) === 'string',
+ 'allowed argument must be an array of strings');
+ }
+
+ return (Object.keys(obj).filter(function (key) {
+ return (allowed.indexOf(key) === -1);
+ }));
+}
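+
+/*
+ * Example (hypothetical options object): any key outside the allowed list is
+ * reported back to the caller.
+ *
+ *     extraProperties({ host: 'x', port: 80, foo: 1 }, [ 'host', 'port' ]);
+ *     // => [ 'foo' ]
+ */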
+
+/*
+ * Given three sets of properties "provided" (may be undefined), "overrides"
+ * (required), and "defaults" (may be undefined), construct an object containing
+ * the union of these sets with "overrides" overriding "provided", and
+ * "provided" overriding "defaults". None of the input objects are modified.
+ */
+function mergeObjects(provided, overrides, defaults)
+{
+ var rv, k;
+
+ rv = {};
+ if (defaults) {
+ for (k in defaults)
+ rv[k] = defaults[k];
+ }
+
+ if (provided) {
+ for (k in provided)
+ rv[k] = provided[k];
+ }
+
+ if (overrides) {
+ for (k in overrides)
+ rv[k] = overrides[k];
+ }
+
+ return (rv);
+}
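+
+/*
+ * Example (hypothetical values): "overrides" wins over "provided", which in
+ * turn wins over "defaults".
+ *
+ *     mergeObjects({ port: 8080 }, { host: 'localhost' },
+ *         { host: '0.0.0.0', port: 80 });
+ *     // => { host: 'localhost', port: 8080 }
+ */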
diff --git a/familyark/app/node_modules/jsprim/package.json b/familyark/app/node_modules/jsprim/package.json
new file mode 100644
index 0000000..c75eb3b
--- /dev/null
+++ b/familyark/app/node_modules/jsprim/package.json
@@ -0,0 +1,49 @@
+{
+ "_from": "jsprim@^1.2.2",
+ "_id": "jsprim@1.4.1",
+ "_inBundle": false,
+ "_integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=",
+ "_location": "/jsprim",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "jsprim@^1.2.2",
+ "name": "jsprim",
+ "escapedName": "jsprim",
+ "rawSpec": "^1.2.2",
+ "saveSpec": null,
+ "fetchSpec": "^1.2.2"
+ },
+ "_requiredBy": [
+ "/http-signature"
+ ],
+ "_resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz",
+ "_shasum": "313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2",
+ "_spec": "jsprim@^1.2.2",
+ "_where": "/home/g/Workspace/hatthieves/familyARK/familyark/app/node_modules/http-signature",
+ "bugs": {
+ "url": "https://github.com/joyent/node-jsprim/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {
+ "assert-plus": "1.0.0",
+ "extsprintf": "1.3.0",
+ "json-schema": "0.2.3",
+ "verror": "1.10.0"
+ },
+ "deprecated": false,
+ "description": "utilities for primitive JavaScript types",
+ "engines": [
+ "node >=0.6.0"
+ ],
+ "homepage": "https://github.com/joyent/node-jsprim#readme",
+ "license": "MIT",
+ "main": "./lib/jsprim.js",
+ "name": "jsprim",
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/joyent/node-jsprim.git"
+ },
+ "version": "1.4.1"
+}
diff --git a/familyark/app/node_modules/oauth-sign/LICENSE b/familyark/app/node_modules/oauth-sign/LICENSE
new file mode 100644
index 0000000..a4a9aee
--- /dev/null
+++ b/familyark/app/node_modules/oauth-sign/LICENSE
@@ -0,0 +1,55 @@
+Apache License
+
+Version 2.0, January 2004
+
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
+
+"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
+
+You must give any other recipients of the Work or Derivative Works a copy of this License; and
+
+You must cause any modified files to carry prominent notices stating that You changed the files; and
+
+You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
+
+If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
\ No newline at end of file
diff --git a/familyark/app/node_modules/oauth-sign/README.md b/familyark/app/node_modules/oauth-sign/README.md
new file mode 100644
index 0000000..549cbba
--- /dev/null
+++ b/familyark/app/node_modules/oauth-sign/README.md
@@ -0,0 +1,11 @@
+oauth-sign
+==========
+
+OAuth 1 signing. Formerly a vendor lib in mikeal/request, now a standalone module.
+
+## Supported Signature Methods
+
+- HMAC-SHA1
+- HMAC-SHA256
+- RSA-SHA1
+- PLAINTEXT
\ No newline at end of file
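+
+## Example
+
+A minimal HMAC-SHA1 sketch; the key, secrets and URL below are placeholders, not values mandated by the module:
+
+```js
+var oauth = require('oauth-sign')
+
+var signature = oauth.hmacsign('GET', 'https://api.example.com/photos', {
+  oauth_consumer_key: 'consumer-key',
+  oauth_nonce: 'nonce',
+  oauth_signature_method: 'HMAC-SHA1',
+  oauth_timestamp: '1318622958',
+  oauth_version: '1.0'
+}, 'consumer-secret', 'token-secret')
+```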
diff --git a/familyark/app/node_modules/oauth-sign/index.js b/familyark/app/node_modules/oauth-sign/index.js
new file mode 100644
index 0000000..6482f77
--- /dev/null
+++ b/familyark/app/node_modules/oauth-sign/index.js
@@ -0,0 +1,146 @@
+var crypto = require('crypto')
+
+function sha (key, body, algorithm) {
+ return crypto.createHmac(algorithm, key).update(body).digest('base64')
+}
+
+function rsa (key, body) {
+ return crypto.createSign('RSA-SHA1').update(body).sign(key, 'base64')
+}
+
+function rfc3986 (str) {
+ return encodeURIComponent(str)
+ .replace(/!/g,'%21')
+ .replace(/\*/g,'%2A')
+ .replace(/\(/g,'%28')
+ .replace(/\)/g,'%29')
+ .replace(/'/g,'%27')
+}
+
+// Maps an object to a two-dimensional array of [key, value] pairs
+// Converts { foo: 'A', bar: [ 'b', 'B' ]} to
+// [ ['foo', 'A'], ['bar', 'b'], ['bar', 'B'] ]
+function map (obj) {
+ var key, val, arr = []
+ for (key in obj) {
+ val = obj[key]
+ if (Array.isArray(val))
+ for (var i = 0; i < val.length; i++)
+ arr.push([key, val[i]])
+ else if (typeof val === 'object')
+ for (var prop in val)
+ arr.push([key + '[' + prop + ']', val[prop]])
+ else
+ arr.push([key, val])
+ }
+ return arr
+}
+
+// Compare function for sort
+function compare (a, b) {
+ return a > b ? 1 : a < b ? -1 : 0
+}
+
+function generateBase (httpMethod, base_uri, params) {
+ // adapted from https://dev.twitter.com/docs/auth/oauth and
+ // https://dev.twitter.com/docs/auth/creating-signature
+
+ // Parameter normalization
+ // http://tools.ietf.org/html/rfc5849#section-3.4.1.3.2
+ var normalized = map(params)
+ // 1. First, the name and value of each parameter are encoded
+ .map(function (p) {
+ return [ rfc3986(p[0]), rfc3986(p[1] || '') ]
+ })
+ // 2. The parameters are sorted by name, using ascending byte value
+ // ordering. If two or more parameters share the same name, they
+ // are sorted by their value.
+ .sort(function (a, b) {
+ return compare(a[0], b[0]) || compare(a[1], b[1])
+ })
+ // 3. The name of each parameter is concatenated to its corresponding
+ // value using an "=" character (ASCII code 61) as a separator, even
+ // if the value is empty.
+ .map(function (p) { return p.join('=') })
+ // 4. The sorted name/value pairs are concatenated together into a
+ // single string by using an "&" character (ASCII code 38) as
+ // separator.
+ .join('&')
+
+ var base = [
+ rfc3986(httpMethod ? httpMethod.toUpperCase() : 'GET'),
+ rfc3986(base_uri),
+ rfc3986(normalized)
+ ].join('&')
+
+ return base
+}
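+
+// Illustrative sketch (placeholder URL and parameter): the base string is the
+// upper-cased HTTP method, the encoded base URI, and the encoded, sorted
+// parameter string, joined by '&'.
+//
+//   generateBase('get', 'http://example.com/request', { status: 'hello world' })
+//   // => 'GET&http%3A%2F%2Fexample.com%2Frequest&status%3Dhello%2520world'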
+
+function hmacsign (httpMethod, base_uri, params, consumer_secret, token_secret) {
+ var base = generateBase(httpMethod, base_uri, params)
+ var key = [
+ consumer_secret || '',
+ token_secret || ''
+ ].map(rfc3986).join('&')
+
+ return sha(key, base, 'sha1')
+}
+
+function hmacsign256 (httpMethod, base_uri, params, consumer_secret, token_secret) {
+ var base = generateBase(httpMethod, base_uri, params)
+ var key = [
+ consumer_secret || '',
+ token_secret || ''
+ ].map(rfc3986).join('&')
+
+ return sha(key, base, 'sha256')
+}
+
+function rsasign (httpMethod, base_uri, params, private_key, token_secret) {
+ var base = generateBase(httpMethod, base_uri, params)
+ var key = private_key || ''
+
+ return rsa(key, base)
+}
+
+function plaintext (consumer_secret, token_secret) {
+ var key = [
+ consumer_secret || '',
+ token_secret || ''
+ ].map(rfc3986).join('&')
+
+ return key
+}
+
+function sign (signMethod, httpMethod, base_uri, params, consumer_secret, token_secret) {
+ var method
+ var skipArgs = 1
+
+ switch (signMethod) {
+ case 'RSA-SHA1':
+ method = rsasign
+ break
+ case 'HMAC-SHA1':
+ method = hmacsign
+ break
+ case 'HMAC-SHA256':
+ method = hmacsign256
+ break
+ case 'PLAINTEXT':
+ method = plaintext
+ skipArgs = 4
+ break
+ default:
+ throw new Error('Signature method not supported: ' + signMethod)
+ }
+
+ return method.apply(null, [].slice.call(arguments, skipArgs))
+}
+
+exports.hmacsign = hmacsign
+exports.hmacsign256 = hmacsign256
+exports.rsasign = rsasign
+exports.plaintext = plaintext
+exports.sign = sign
+exports.rfc3986 = rfc3986
+exports.generateBase = generateBase
\ No newline at end of file
diff --git a/familyark/app/node_modules/oauth-sign/package.json b/familyark/app/node_modules/oauth-sign/package.json
new file mode 100644
index 0000000..c1b8dc0
--- /dev/null
+++ b/familyark/app/node_modules/oauth-sign/package.json
@@ -0,0 +1,56 @@
+{
+ "_from": "oauth-sign@~0.9.0",
+ "_id": "oauth-sign@0.9.0",
+ "_inBundle": false,
+ "_integrity": "sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==",
+ "_location": "/oauth-sign",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "oauth-sign@~0.9.0",
+ "name": "oauth-sign",
+ "escapedName": "oauth-sign",
+ "rawSpec": "~0.9.0",
+ "saveSpec": null,
+ "fetchSpec": "~0.9.0"
+ },
+ "_requiredBy": [
+ "/request"
+ ],
+ "_resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz",
+ "_shasum": "47a7b016baa68b5fa0ecf3dee08a85c679ac6455",
+ "_spec": "oauth-sign@~0.9.0",
+ "_where": "/home/g/Workspace/hatthieves/familyARK/familyark/app/node_modules/request",
+ "author": {
+ "name": "Mikeal Rogers",
+ "email": "mikeal.rogers@gmail.com",
+ "url": "http://www.futurealoof.com"
+ },
+ "bugs": {
+ "url": "https://github.com/mikeal/oauth-sign/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {},
+ "deprecated": false,
+ "description": "OAuth 1 signing. Formerly a vendor lib in mikeal/request, now a standalone module.",
+ "devDependencies": {},
+ "engines": {
+ "node": "*"
+ },
+ "files": [
+ "index.js"
+ ],
+ "homepage": "https://github.com/mikeal/oauth-sign#readme",
+ "license": "Apache-2.0",
+ "main": "index.js",
+ "name": "oauth-sign",
+ "optionalDependencies": {},
+ "repository": {
+ "url": "git+https://github.com/mikeal/oauth-sign.git"
+ },
+ "scripts": {
+ "test": "node test.js"
+ },
+ "version": "0.9.0"
+}
diff --git a/familyark/app/node_modules/performance-now/.npmignore b/familyark/app/node_modules/performance-now/.npmignore
new file mode 100644
index 0000000..496ee2c
--- /dev/null
+++ b/familyark/app/node_modules/performance-now/.npmignore
@@ -0,0 +1 @@
+.DS_Store
\ No newline at end of file
diff --git a/familyark/app/node_modules/performance-now/.tm_properties b/familyark/app/node_modules/performance-now/.tm_properties
new file mode 100644
index 0000000..4b8eb3f
--- /dev/null
+++ b/familyark/app/node_modules/performance-now/.tm_properties
@@ -0,0 +1,7 @@
+excludeDirectories = "{.git,node_modules}"
+excludeInFolderSearch = "{excludeDirectories,lib}"
+
+includeFiles = "{.gitignore,.npmignore,.travis.yml}"
+
+[ attr.untitled ]
+fileType = 'source.coffee'
\ No newline at end of file
diff --git a/familyark/app/node_modules/performance-now/.travis.yml b/familyark/app/node_modules/performance-now/.travis.yml
new file mode 100644
index 0000000..1543c19
--- /dev/null
+++ b/familyark/app/node_modules/performance-now/.travis.yml
@@ -0,0 +1,6 @@
+language: node_js
+node_js:
+ - "node"
+ - "6"
+ - "4"
+ - "0.12"
diff --git a/familyark/app/node_modules/performance-now/README.md b/familyark/app/node_modules/performance-now/README.md
new file mode 100644
index 0000000..28080f8
--- /dev/null
+++ b/familyark/app/node_modules/performance-now/README.md
@@ -0,0 +1,30 @@
+# performance-now [Build Status](https://travis-ci.org/braveg1rl/performance-now) [Dependencies](https://david-dm.org/braveg1rl/performance-now)
+
+Implements a function similar to `performance.now` (based on `process.hrtime`).
+
+Modern browsers have a `window.performance` object with - among others - a `now` method which gives time in milliseconds, but with sub-millisecond precision. This module offers the same function based on the Node.js native `process.hrtime` function.
+
+Using `process.hrtime` means that the reported time will be monotonically increasing, and not subject to clock-drift.
+
+According to the [High Resolution Time specification](http://www.w3.org/TR/hr-time/), the number of milliseconds reported by `performance.now` should be relative to the value of `performance.timing.navigationStart`.
+
+In the current version of the module (2.0) the reported time is relative to the time the current Node process was started (inferred from `process.uptime()`).
+
+Version 1.0 reported a different time. The reported time was relative to the time the module was loaded (i.e. the time it was first `require`d). If you need this functionality, version 1.0 is still available on NPM.
+
+## Example usage
+
+```javascript
+var now = require("performance-now")
+var start = now()
+var end = now()
+console.log(start.toFixed(3)) // the number of milliseconds the current node process has been running
+console.log((end-start).toFixed(3)) // ~ 0.002 on my system
+```
+
+Running the now function two times right after each other yields a time difference of a few microseconds. Given this overhead, I think it's best to assume that the precision of intervals computed with this method is not higher than 10 microseconds, if you don't know the exact overhead on your own system.
+
+## License
+
+performance-now is released under the [MIT License](http://opensource.org/licenses/MIT).
+Copyright (c) 2017 Braveg1rl
diff --git a/familyark/app/node_modules/performance-now/lib/performance-now.js b/familyark/app/node_modules/performance-now/lib/performance-now.js
new file mode 100644
index 0000000..37f569d
--- /dev/null
+++ b/familyark/app/node_modules/performance-now/lib/performance-now.js
@@ -0,0 +1,36 @@
+// Generated by CoffeeScript 1.12.2
+(function() {
+ var getNanoSeconds, hrtime, loadTime, moduleLoadTime, nodeLoadTime, upTime;
+
+ if ((typeof performance !== "undefined" && performance !== null) && performance.now) {
+ module.exports = function() {
+ return performance.now();
+ };
+ } else if ((typeof process !== "undefined" && process !== null) && process.hrtime) {
+ module.exports = function() {
+ return (getNanoSeconds() - nodeLoadTime) / 1e6;
+ };
+ hrtime = process.hrtime;
+ getNanoSeconds = function() {
+ var hr;
+ hr = hrtime();
+ return hr[0] * 1e9 + hr[1];
+ };
+ moduleLoadTime = getNanoSeconds();
+ upTime = process.uptime() * 1e9;
+ nodeLoadTime = moduleLoadTime - upTime;
+ } else if (Date.now) {
+ module.exports = function() {
+ return Date.now() - loadTime;
+ };
+ loadTime = Date.now();
+ } else {
+ module.exports = function() {
+ return new Date().getTime() - loadTime;
+ };
+ loadTime = new Date().getTime();
+ }
+
+}).call(this);
+
+//# sourceMappingURL=performance-now.js.map
diff --git a/familyark/app/node_modules/performance-now/lib/performance-now.js.map b/familyark/app/node_modules/performance-now/lib/performance-now.js.map
new file mode 100644
index 0000000..bef8362
--- /dev/null
+++ b/familyark/app/node_modules/performance-now/lib/performance-now.js.map
@@ -0,0 +1,10 @@
+{
+ "version": 3,
+ "file": "performance-now.js",
+ "sourceRoot": "..",
+ "sources": [
+ "src/performance-now.coffee"
+ ],
+ "names": [],
+ "mappings": ";AAAA;AAAA,MAAA;;EAAA,IAAG,4DAAA,IAAiB,WAAW,CAAC,GAAhC;IACE,MAAM,CAAC,OAAP,GAAiB,SAAA;aAAG,WAAW,CAAC,GAAZ,CAAA;IAAH,EADnB;GAAA,MAEK,IAAG,oDAAA,IAAa,OAAO,CAAC,MAAxB;IACH,MAAM,CAAC,OAAP,GAAiB,SAAA;aAAG,CAAC,cAAA,CAAA,CAAA,GAAmB,YAApB,CAAA,GAAoC;IAAvC;IACjB,MAAA,GAAS,OAAO,CAAC;IACjB,cAAA,GAAiB,SAAA;AACf,UAAA;MAAA,EAAA,GAAK,MAAA,CAAA;aACL,EAAG,CAAA,CAAA,CAAH,GAAQ,GAAR,GAAc,EAAG,CAAA,CAAA;IAFF;IAGjB,cAAA,GAAiB,cAAA,CAAA;IACjB,MAAA,GAAS,OAAO,CAAC,MAAR,CAAA,CAAA,GAAmB;IAC5B,YAAA,GAAe,cAAA,GAAiB,OAR7B;GAAA,MASA,IAAG,IAAI,CAAC,GAAR;IACH,MAAM,CAAC,OAAP,GAAiB,SAAA;aAAG,IAAI,CAAC,GAAL,CAAA,CAAA,GAAa;IAAhB;IACjB,QAAA,GAAW,IAAI,CAAC,GAAL,CAAA,EAFR;GAAA,MAAA;IAIH,MAAM,CAAC,OAAP,GAAiB,SAAA;aAAO,IAAA,IAAA,CAAA,CAAM,CAAC,OAAP,CAAA,CAAJ,GAAuB;IAA1B;IACjB,QAAA,GAAe,IAAA,IAAA,CAAA,CAAM,CAAC,OAAP,CAAA,EALZ;;AAXL"
+}
\ No newline at end of file
diff --git a/familyark/app/node_modules/performance-now/license.txt b/familyark/app/node_modules/performance-now/license.txt
new file mode 100644
index 0000000..0bf51b4
--- /dev/null
+++ b/familyark/app/node_modules/performance-now/license.txt
@@ -0,0 +1,7 @@
+Copyright (c) 2013 Braveg1rl
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file
diff --git a/familyark/app/node_modules/performance-now/package.json b/familyark/app/node_modules/performance-now/package.json
new file mode 100644
index 0000000..b7b4e46
--- /dev/null
+++ b/familyark/app/node_modules/performance-now/package.json
@@ -0,0 +1,65 @@
+{
+ "_from": "performance-now@^2.1.0",
+ "_id": "performance-now@2.1.0",
+ "_inBundle": false,
+ "_integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=",
+ "_location": "/performance-now",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "performance-now@^2.1.0",
+ "name": "performance-now",
+ "escapedName": "performance-now",
+ "rawSpec": "^2.1.0",
+ "saveSpec": null,
+ "fetchSpec": "^2.1.0"
+ },
+ "_requiredBy": [
+ "/request"
+ ],
+ "_resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz",
+ "_shasum": "6309f4e0e5fa913ec1c69307ae364b4b377c9e7b",
+ "_spec": "performance-now@^2.1.0",
+ "_where": "/home/g/Workspace/hatthieves/familyARK/familyark/app/node_modules/request",
+ "author": {
+ "name": "Braveg1rl",
+ "email": "braveg1rl@outlook.com"
+ },
+ "bugs": {
+ "url": "https://github.com/braveg1rl/performance-now/issues"
+ },
+ "bundleDependencies": false,
+ "dependencies": {},
+ "deprecated": false,
+ "description": "Implements performance.now (based on process.hrtime).",
+ "devDependencies": {
+ "bluebird": "^3.4.7",
+ "call-delayed": "^1.0.0",
+ "chai": "^3.5.0",
+ "chai-increasing": "^1.2.0",
+ "coffee-script": "~1.12.2",
+ "mocha": "~3.2.0",
+ "pre-commit": "^1.2.2"
+ },
+ "homepage": "https://github.com/braveg1rl/performance-now",
+ "keywords": [],
+ "license": "MIT",
+ "main": "lib/performance-now.js",
+ "name": "performance-now",
+ "optionalDependencies": {},
+ "private": false,
+ "repository": {
+ "type": "git",
+ "url": "git://github.com/braveg1rl/performance-now.git"
+ },
+ "scripts": {
+ "build": "mkdir -p lib && rm -rf lib/* && node_modules/.bin/coffee --compile -m --output lib/ src/",
+ "prepublish": "npm test",
+ "pretest": "npm run build",
+ "test": "mocha",
+ "watch": "coffee --watch --compile --output lib/ src/"
+ },
+ "typings": "src/index.d.ts",
+ "version": "2.1.0"
+}
diff --git a/familyark/app/node_modules/performance-now/src/index.d.ts b/familyark/app/node_modules/performance-now/src/index.d.ts
new file mode 100644
index 0000000..68dca8e
--- /dev/null
+++ b/familyark/app/node_modules/performance-now/src/index.d.ts
@@ -0,0 +1,8 @@
+// This file describes the package to typescript.
+
+/**
+ * Returns the number of milliseconds since the page was loaded (if browser)
+ * or the node process was started.
+ */
+declare function now(): number;
+export = now;
diff --git a/familyark/app/node_modules/performance-now/src/performance-now.coffee b/familyark/app/node_modules/performance-now/src/performance-now.coffee
new file mode 100644
index 0000000..a8e075a
--- /dev/null
+++ b/familyark/app/node_modules/performance-now/src/performance-now.coffee
@@ -0,0 +1,17 @@
+if performance? and performance.now
+ module.exports = -> performance.now()
+else if process? and process.hrtime
+ module.exports = -> (getNanoSeconds() - nodeLoadTime) / 1e6
+ hrtime = process.hrtime
+ getNanoSeconds = ->
+ hr = hrtime()
+ hr[0] * 1e9 + hr[1]
+ moduleLoadTime = getNanoSeconds()
+ upTime = process.uptime() * 1e9
+ nodeLoadTime = moduleLoadTime - upTime
+else if Date.now
+ module.exports = -> Date.now() - loadTime
+ loadTime = Date.now()
+else
+ module.exports = -> new Date().getTime() - loadTime
+ loadTime = new Date().getTime()
diff --git a/familyark/app/node_modules/performance-now/test/mocha.opts b/familyark/app/node_modules/performance-now/test/mocha.opts
new file mode 100644
index 0000000..55d8492
--- /dev/null
+++ b/familyark/app/node_modules/performance-now/test/mocha.opts
@@ -0,0 +1,3 @@
+--require coffee-script/register
+--compilers coffee:coffee-script/register
+--reporter spec
\ No newline at end of file
diff --git a/familyark/app/node_modules/performance-now/test/performance-now.coffee b/familyark/app/node_modules/performance-now/test/performance-now.coffee
new file mode 100644
index 0000000..c99e95c
--- /dev/null
+++ b/familyark/app/node_modules/performance-now/test/performance-now.coffee
@@ -0,0 +1,43 @@
+chai = require "chai"
+chai.use(require "chai-increasing")
+{assert,expect} = chai
+Bluebird = require "bluebird"
+
+now = require "../"
+
+getUptime = -> process.uptime() * 1e3
+
+describe "now", ->
+ it "reported time differs at most 1ms from a freshly reported uptime", ->
+ assert.isAtMost Math.abs(now()-getUptime()), 1
+
+ it "two subsequent calls return an increasing number", ->
+ assert.isBelow now(), now()
+
+ it "has less than 10 microseconds overhead", ->
+ assert.isBelow Math.abs(now() - now()), 0.010
+
+ it "can be called 1 million times in under 1 second (averaging under 1 microsecond per call)", ->
+ @timeout 1000
+ now() for [0...1e6]
+ undefined
+
+ it "for 10,000 numbers, number n is never bigger than number n-1", ->
+ stamps = (now() for [1...10000])
+ expect(stamps).to.be.increasing
+
+ it "shows that at least 0.2 ms has passed after a timeout of 1 ms", ->
+ earlier = now()
+ Bluebird.resolve().delay(1).then -> assert.isAbove (now()-earlier), 0.2
+
+ it "shows that at most 3 ms has passed after a timeout of 1 ms", ->
+ earlier = now()
+ Bluebird.resolve().delay(1).then -> assert.isBelow (now()-earlier), 3
+
+ it "shows that at least 190ms ms has passed after a timeout of 200ms", ->
+ earlier = now()
+ Bluebird.resolve().delay(200).then -> assert.isAbove (now()-earlier), 190
+
+ it "shows that at most 220 ms has passed after a timeout of 200ms", ->
+ earlier = now()
+ Bluebird.resolve().delay(200).then -> assert.isBelow (now()-earlier), 220
diff --git a/familyark/app/node_modules/performance-now/test/scripts.coffee b/familyark/app/node_modules/performance-now/test/scripts.coffee
new file mode 100644
index 0000000..16312f1
--- /dev/null
+++ b/familyark/app/node_modules/performance-now/test/scripts.coffee
@@ -0,0 +1,27 @@
+Bluebird = require "bluebird"
+exec = require("child_process").execSync
+{assert} = require "chai"
+
+describe "scripts/initital-value.coffee (module.uptime(), expressed in milliseconds)", ->
+ result = exec("./test/scripts/initial-value.coffee").toString().trim()
+ it "printed #{result}", ->
+ it "printed a value above 100", -> assert.isAbove result, 100
+ it "printed a value below 350", -> assert.isBelow result, 350
+
+describe "scripts/delayed-require.coffee (sum of uptime and 250 ms delay`)", ->
+ result = exec("./test/scripts/delayed-require.coffee").toString().trim()
+ it "printed #{result}", ->
+ it "printed a value above 350", -> assert.isAbove result, 350
+ it "printed a value below 600", -> assert.isBelow result, 600
+
+describe "scripts/delayed-call.coffee (sum of uptime and 250 ms delay`)", ->
+ result = exec("./test/scripts/delayed-call.coffee").toString().trim()
+ it "printed #{result}", ->
+ it "printed a value above 350", -> assert.isAbove result, 350
+ it "printed a value below 600", -> assert.isBelow result, 600
+
+describe "scripts/difference.coffee", ->
+ result = exec("./test/scripts/difference.coffee").toString().trim()
+ it "printed #{result}", ->
+ it "printed a value above 0.005", -> assert.isAbove result, 0.005
+ it "printed a value below 0.07", -> assert.isBelow result, 0.07
diff --git a/familyark/app/node_modules/performance-now/test/scripts/delayed-call.coffee b/familyark/app/node_modules/performance-now/test/scripts/delayed-call.coffee
new file mode 100755
index 0000000..0c3bab5
--- /dev/null
+++ b/familyark/app/node_modules/performance-now/test/scripts/delayed-call.coffee
@@ -0,0 +1,11 @@
+#!/usr/bin/env ./node_modules/.bin/coffee
+
+###
+Expected output is a number above 350 and below 600.
+The time reported is relative to the time the node.js process was started,
+which is approximately `(Date.now() - process.uptime() * 1000)`.
+###
+
+delay = require "call-delayed"
+now = require "../../lib/performance-now"
+delay 250, -> console.log now().toFixed 3
diff --git a/familyark/app/node_modules/performance-now/test/scripts/delayed-require.coffee b/familyark/app/node_modules/performance-now/test/scripts/delayed-require.coffee
new file mode 100755
index 0000000..3ddee95
--- /dev/null
+++ b/familyark/app/node_modules/performance-now/test/scripts/delayed-require.coffee
@@ -0,0 +1,12 @@
+#!/usr/bin/env ./node_modules/.bin/coffee
+
+###
+Expected output is a number above 350 and below 600.
+The time reported is relative to the time the node.js process was started,
+which is approximately `(Date.now() - process.uptime() * 1000)`.
+###
+
+delay = require "call-delayed"
+delay 250, ->
+ now = require "../../lib/performance-now"
+ console.log now().toFixed 3
diff --git a/familyark/app/node_modules/performance-now/test/scripts/difference.coffee b/familyark/app/node_modules/performance-now/test/scripts/difference.coffee
new file mode 100755
index 0000000..0b5edf6
--- /dev/null
+++ b/familyark/app/node_modules/performance-now/test/scripts/difference.coffee
@@ -0,0 +1,6 @@
+#!/usr/bin/env ./node_modules/.bin/coffee
+
+# Expected output is above 0.005 and below 0.07.
+
+now = require('../../lib/performance-now')
+console.log -(now() - now()).toFixed 3
diff --git a/familyark/app/node_modules/performance-now/test/scripts/initial-value.coffee b/familyark/app/node_modules/performance-now/test/scripts/initial-value.coffee
new file mode 100755
index 0000000..19ef4e0
--- /dev/null
+++ b/familyark/app/node_modules/performance-now/test/scripts/initial-value.coffee
@@ -0,0 +1,10 @@
+#!/usr/bin/env ./node_modules/.bin/coffee
+
+###
+Expected output is a number above 100 and below 350.
+The time reported is relative to the time the node.js process was started,
+which is approximately `(Date.now() - process.uptime() * 1000)`.
+###
+
+now = require '../../lib/performance-now'
+console.log now().toFixed 3
diff --git a/familyark/app/node_modules/psl/LICENSE b/familyark/app/node_modules/psl/LICENSE
new file mode 100644
index 0000000..78d792e
--- /dev/null
+++ b/familyark/app/node_modules/psl/LICENSE
@@ -0,0 +1,9 @@
+The MIT License (MIT)
+
+Copyright (c) 2017 Lupo Montero lupomontero@gmail.com
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/familyark/app/node_modules/psl/README.md b/familyark/app/node_modules/psl/README.md
new file mode 100644
index 0000000..e876c3d
--- /dev/null
+++ b/familyark/app/node_modules/psl/README.md
@@ -0,0 +1,215 @@
+# psl (Public Suffix List)
+
+[npm package](https://nodei.co/npm/psl/)
+
+[Greenkeeper](https://greenkeeper.io/)
+[Build Status](https://travis-ci.org/lupomontero/psl)
+[devDependency Status](https://david-dm.org/lupomontero/psl#info=devDependencies)
+
+`psl` is a `JavaScript` domain name parser based on the
+[Public Suffix List](https://publicsuffix.org/).
+
+This implementation is tested against the
+[test data hosted by Mozilla](http://mxr.mozilla.org/mozilla-central/source/netwerk/test/unit/data/test_psl.txt?raw=1)
+and kindly provided by [Comodo](https://www.comodo.com/).
+
+Cross browser testing provided by
+[BrowserStack](https://www.browserstack.com/)
+
+## What is the Public Suffix List?
+
+The Public Suffix List is a cross-vendor initiative to provide an accurate list
+of domain name suffixes.
+
+The Public Suffix List is an initiative of the Mozilla Project, but is
+maintained as a community resource. It is available for use in any software,
+but was originally created to meet the needs of browser manufacturers.
+
+A "public suffix" is one under which Internet users can directly register names.
+Some examples of public suffixes are ".com", ".co.uk" and "pvt.k12.wy.us". The
+Public Suffix List is a list of all known public suffixes.
+
+Source: http://publicsuffix.org
+
+
+## Installation
+
+### Node.js
+
+```sh
+npm install --save psl
+```
+
+### Browser
+
+Download [psl.min.js](https://raw.githubusercontent.com/lupomontero/psl/master/dist/psl.min.js)
+and include it in a script tag.
+
+```html
+<script src="psl.min.js"></script>
+```
+
+This script is browserified and wrapped in a [umd](https://github.com/umdjs/umd)
+wrapper so you should be able to use it standalone or together with a module
+loader.
+
+## API
+
+### `psl.parse(domain)`
+
+Parse domain based on Public Suffix List. Returns an `Object` with the following
+properties:
+
+* `tld`: Top level domain (this is the _public suffix_).
+* `sld`: Second level domain (the first private part of the domain name).
+* `domain`: The domain name is the `sld` + `tld`.
+* `subdomain`: Optional parts left of the domain.
+
+#### Example:
+
+```js
+var psl = require('psl');
+
+// Parse domain without subdomain
+var parsed = psl.parse('google.com');
+console.log(parsed.tld); // 'com'
+console.log(parsed.sld); // 'google'
+console.log(parsed.domain); // 'google.com'
+console.log(parsed.subdomain); // null
+
+// Parse domain with subdomain
+var parsed = psl.parse('www.google.com');
+console.log(parsed.tld); // 'com'
+console.log(parsed.sld); // 'google'
+console.log(parsed.domain); // 'google.com'
+console.log(parsed.subdomain); // 'www'
+
+// Parse domain with nested subdomains
+var parsed = psl.parse('a.b.c.d.foo.com');
+console.log(parsed.tld); // 'com'
+console.log(parsed.sld); // 'foo'
+console.log(parsed.domain); // 'foo.com'
+console.log(parsed.subdomain); // 'a.b.c.d'
+```
+
+### `psl.get(domain)`
+
+Get domain name, `sld` + `tld`. Returns `null` if not valid.
+
+#### Example:
+
+```js
+var psl = require('psl');
+
+// null input.
+psl.get(null); // null
+
+// Mixed case.
+psl.get('COM'); // null
+psl.get('example.COM'); // 'example.com'
+psl.get('WwW.example.COM'); // 'example.com'
+
+// Unlisted TLD.
+psl.get('example'); // null
+psl.get('example.example'); // 'example.example'
+psl.get('b.example.example'); // 'example.example'
+psl.get('a.b.example.example'); // 'example.example'
+
+// TLD with only 1 rule.
+psl.get('biz'); // null
+psl.get('domain.biz'); // 'domain.biz'
+psl.get('b.domain.biz'); // 'domain.biz'
+psl.get('a.b.domain.biz'); // 'domain.biz'
+
+// TLD with some 2-level rules.
+psl.get('uk.com'); // null
+psl.get('example.uk.com'); // 'example.uk.com'
+psl.get('b.example.uk.com'); // 'example.uk.com'
+
+// More complex TLD.
+psl.get('c.kobe.jp'); // null
+psl.get('b.c.kobe.jp'); // 'b.c.kobe.jp'
+psl.get('a.b.c.kobe.jp'); // 'b.c.kobe.jp'
+psl.get('city.kobe.jp'); // 'city.kobe.jp'
+psl.get('www.city.kobe.jp'); // 'city.kobe.jp'
+
+// IDN labels.
+psl.get('食狮.com.cn'); // '食狮.com.cn'
+psl.get('食狮.公司.cn'); // '食狮.公司.cn'
+psl.get('www.食狮.公司.cn'); // '食狮.公司.cn'
+
+// Same as above, but punycoded.
+psl.get('xn--85x722f.com.cn'); // 'xn--85x722f.com.cn'
+psl.get('xn--85x722f.xn--55qx5d.cn'); // 'xn--85x722f.xn--55qx5d.cn'
+psl.get('www.xn--85x722f.xn--55qx5d.cn'); // 'xn--85x722f.xn--55qx5d.cn'
+```
+
+### `psl.isValid(domain)`
+
+Check whether a domain has a valid Public Suffix. Returns a `Boolean` indicating
+whether the domain has a valid Public Suffix.
+
+#### Example
+
+```js
+var psl = require('psl');
+
+psl.isValid('google.com'); // true
+psl.isValid('www.google.com'); // true
+psl.isValid('x.yz'); // false
+```
+
+
+## Testing and Building
+
+Tests are written using [`mocha`](https://mochajs.org/) and can be
+run in two different environments: `node` and `phantomjs`.
+
+```sh
+# This will run `eslint`, `mocha` and `karma`.
+npm test
+
+# Individual test environments
+# Run tests in node only.
+./node_modules/.bin/mocha test
+# Run tests in phantomjs only.
+./node_modules/.bin/karma start ./karma.conf.js --single-run
+
+# Build data (parse raw list) and create dist files
+npm run build
+```
+
+Feel free to fork if you see possible improvements!
+
+
+## Acknowledgements
+
+* Mozilla Foundation's [Public Suffix List](https://publicsuffix.org/)
+* Thanks to Rob Stradling of [Comodo](https://www.comodo.com/) for providing
+ test data.
+* Inspired by [weppos/publicsuffix-ruby](https://github.com/weppos/publicsuffix-ruby)
+
+
+## License
+
+The MIT License (MIT)
+
+Copyright (c) 2017 Lupo Montero
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/familyark/app/node_modules/psl/browserstack-logo.svg b/familyark/app/node_modules/psl/browserstack-logo.svg
new file mode 100644
index 0000000..195f64d
--- /dev/null
+++ b/familyark/app/node_modules/psl/browserstack-logo.svg
@@ -0,0 +1,90 @@
+<!-- BrowserStack logo SVG (markup not recovered) -->
diff --git a/familyark/app/node_modules/psl/data/rules.json b/familyark/app/node_modules/psl/data/rules.json
new file mode 100644
index 0000000..e19abdc
--- /dev/null
+++ b/familyark/app/node_modules/psl/data/rules.json
@@ -0,0 +1,8834 @@
+[
+"ac",
+"com.ac",
+"edu.ac",
+"gov.ac",
+"net.ac",
+"mil.ac",
+"org.ac",
+"ad",
+"nom.ad",
+"ae",
+"co.ae",
+"net.ae",
+"org.ae",
+"sch.ae",
+"ac.ae",
+"gov.ae",
+"mil.ae",
+"aero",
+"accident-investigation.aero",
+"accident-prevention.aero",
+"aerobatic.aero",
+"aeroclub.aero",
+"aerodrome.aero",
+"agents.aero",
+"aircraft.aero",
+"airline.aero",
+"airport.aero",
+"air-surveillance.aero",
+"airtraffic.aero",
+"air-traffic-control.aero",
+"ambulance.aero",
+"amusement.aero",
+"association.aero",
+"author.aero",
+"ballooning.aero",
+"broker.aero",
+"caa.aero",
+"cargo.aero",
+"catering.aero",
+"certification.aero",
+"championship.aero",
+"charter.aero",
+"civilaviation.aero",
+"club.aero",
+"conference.aero",
+"consultant.aero",
+"consulting.aero",
+"control.aero",
+"council.aero",
+"crew.aero",
+"design.aero",
+"dgca.aero",
+"educator.aero",
+"emergency.aero",
+"engine.aero",
+"engineer.aero",
+"entertainment.aero",
+"equipment.aero",
+"exchange.aero",
+"express.aero",
+"federation.aero",
+"flight.aero",
+"freight.aero",
+"fuel.aero",
+"gliding.aero",
+"government.aero",
+"groundhandling.aero",
+"group.aero",
+"hanggliding.aero",
+"homebuilt.aero",
+"insurance.aero",
+"journal.aero",
+"journalist.aero",
+"leasing.aero",
+"logistics.aero",
+"magazine.aero",
+"maintenance.aero",
+"media.aero",
+"microlight.aero",
+"modelling.aero",
+"navigation.aero",
+"parachuting.aero",
+"paragliding.aero",
+"passenger-association.aero",
+"pilot.aero",
+"press.aero",
+"production.aero",
+"recreation.aero",
+"repbody.aero",
+"res.aero",
+"research.aero",
+"rotorcraft.aero",
+"safety.aero",
+"scientist.aero",
+"services.aero",
+"show.aero",
+"skydiving.aero",
+"software.aero",
+"student.aero",
+"trader.aero",
+"trading.aero",
+"trainer.aero",
+"union.aero",
+"workinggroup.aero",
+"works.aero",
+"af",
+"gov.af",
+"com.af",
+"org.af",
+"net.af",
+"edu.af",
+"ag",
+"com.ag",
+"org.ag",
+"net.ag",
+"co.ag",
+"nom.ag",
+"ai",
+"off.ai",
+"com.ai",
+"net.ai",
+"org.ai",
+"al",
+"com.al",
+"edu.al",
+"gov.al",
+"mil.al",
+"net.al",
+"org.al",
+"am",
+"co.am",
+"com.am",
+"commune.am",
+"net.am",
+"org.am",
+"ao",
+"ed.ao",
+"gv.ao",
+"og.ao",
+"co.ao",
+"pb.ao",
+"it.ao",
+"aq",
+"ar",
+"com.ar",
+"edu.ar",
+"gob.ar",
+"gov.ar",
+"int.ar",
+"mil.ar",
+"musica.ar",
+"net.ar",
+"org.ar",
+"tur.ar",
+"arpa",
+"e164.arpa",
+"in-addr.arpa",
+"ip6.arpa",
+"iris.arpa",
+"uri.arpa",
+"urn.arpa",
+"as",
+"gov.as",
+"asia",
+"at",
+"ac.at",
+"co.at",
+"gv.at",
+"or.at",
+"au",
+"com.au",
+"net.au",
+"org.au",
+"edu.au",
+"gov.au",
+"asn.au",
+"id.au",
+"info.au",
+"conf.au",
+"oz.au",
+"act.au",
+"nsw.au",
+"nt.au",
+"qld.au",
+"sa.au",
+"tas.au",
+"vic.au",
+"wa.au",
+"act.edu.au",
+"catholic.edu.au",
+"nsw.edu.au",
+"nt.edu.au",
+"qld.edu.au",
+"sa.edu.au",
+"tas.edu.au",
+"vic.edu.au",
+"wa.edu.au",
+"qld.gov.au",
+"sa.gov.au",
+"tas.gov.au",
+"vic.gov.au",
+"wa.gov.au",
+"education.tas.edu.au",
+"schools.nsw.edu.au",
+"aw",
+"com.aw",
+"ax",
+"az",
+"com.az",
+"net.az",
+"int.az",
+"gov.az",
+"org.az",
+"edu.az",
+"info.az",
+"pp.az",
+"mil.az",
+"name.az",
+"pro.az",
+"biz.az",
+"ba",
+"com.ba",
+"edu.ba",
+"gov.ba",
+"mil.ba",
+"net.ba",
+"org.ba",
+"bb",
+"biz.bb",
+"co.bb",
+"com.bb",
+"edu.bb",
+"gov.bb",
+"info.bb",
+"net.bb",
+"org.bb",
+"store.bb",
+"tv.bb",
+"*.bd",
+"be",
+"ac.be",
+"bf",
+"gov.bf",
+"bg",
+"a.bg",
+"b.bg",
+"c.bg",
+"d.bg",
+"e.bg",
+"f.bg",
+"g.bg",
+"h.bg",
+"i.bg",
+"j.bg",
+"k.bg",
+"l.bg",
+"m.bg",
+"n.bg",
+"o.bg",
+"p.bg",
+"q.bg",
+"r.bg",
+"s.bg",
+"t.bg",
+"u.bg",
+"v.bg",
+"w.bg",
+"x.bg",
+"y.bg",
+"z.bg",
+"0.bg",
+"1.bg",
+"2.bg",
+"3.bg",
+"4.bg",
+"5.bg",
+"6.bg",
+"7.bg",
+"8.bg",
+"9.bg",
+"bh",
+"com.bh",
+"edu.bh",
+"net.bh",
+"org.bh",
+"gov.bh",
+"bi",
+"co.bi",
+"com.bi",
+"edu.bi",
+"or.bi",
+"org.bi",
+"biz",
+"bj",
+"asso.bj",
+"barreau.bj",
+"gouv.bj",
+"bm",
+"com.bm",
+"edu.bm",
+"gov.bm",
+"net.bm",
+"org.bm",
+"bn",
+"com.bn",
+"edu.bn",
+"gov.bn",
+"net.bn",
+"org.bn",
+"bo",
+"com.bo",
+"edu.bo",
+"gob.bo",
+"int.bo",
+"org.bo",
+"net.bo",
+"mil.bo",
+"tv.bo",
+"web.bo",
+"academia.bo",
+"agro.bo",
+"arte.bo",
+"blog.bo",
+"bolivia.bo",
+"ciencia.bo",
+"cooperativa.bo",
+"democracia.bo",
+"deporte.bo",
+"ecologia.bo",
+"economia.bo",
+"empresa.bo",
+"indigena.bo",
+"industria.bo",
+"info.bo",
+"medicina.bo",
+"movimiento.bo",
+"musica.bo",
+"natural.bo",
+"nombre.bo",
+"noticias.bo",
+"patria.bo",
+"politica.bo",
+"profesional.bo",
+"plurinacional.bo",
+"pueblo.bo",
+"revista.bo",
+"salud.bo",
+"tecnologia.bo",
+"tksat.bo",
+"transporte.bo",
+"wiki.bo",
+"br",
+"9guacu.br",
+"abc.br",
+"adm.br",
+"adv.br",
+"agr.br",
+"aju.br",
+"am.br",
+"anani.br",
+"aparecida.br",
+"arq.br",
+"art.br",
+"ato.br",
+"b.br",
+"barueri.br",
+"belem.br",
+"bhz.br",
+"bio.br",
+"blog.br",
+"bmd.br",
+"boavista.br",
+"bsb.br",
+"campinagrande.br",
+"campinas.br",
+"caxias.br",
+"cim.br",
+"cng.br",
+"cnt.br",
+"com.br",
+"contagem.br",
+"coop.br",
+"cri.br",
+"cuiaba.br",
+"curitiba.br",
+"def.br",
+"ecn.br",
+"eco.br",
+"edu.br",
+"emp.br",
+"eng.br",
+"esp.br",
+"etc.br",
+"eti.br",
+"far.br",
+"feira.br",
+"flog.br",
+"floripa.br",
+"fm.br",
+"fnd.br",
+"fortal.br",
+"fot.br",
+"foz.br",
+"fst.br",
+"g12.br",
+"ggf.br",
+"goiania.br",
+"gov.br",
+"ac.gov.br",
+"al.gov.br",
+"am.gov.br",
+"ap.gov.br",
+"ba.gov.br",
+"ce.gov.br",
+"df.gov.br",
+"es.gov.br",
+"go.gov.br",
+"ma.gov.br",
+"mg.gov.br",
+"ms.gov.br",
+"mt.gov.br",
+"pa.gov.br",
+"pb.gov.br",
+"pe.gov.br",
+"pi.gov.br",
+"pr.gov.br",
+"rj.gov.br",
+"rn.gov.br",
+"ro.gov.br",
+"rr.gov.br",
+"rs.gov.br",
+"sc.gov.br",
+"se.gov.br",
+"sp.gov.br",
+"to.gov.br",
+"gru.br",
+"imb.br",
+"ind.br",
+"inf.br",
+"jab.br",
+"jampa.br",
+"jdf.br",
+"joinville.br",
+"jor.br",
+"jus.br",
+"leg.br",
+"lel.br",
+"londrina.br",
+"macapa.br",
+"maceio.br",
+"manaus.br",
+"maringa.br",
+"mat.br",
+"med.br",
+"mil.br",
+"morena.br",
+"mp.br",
+"mus.br",
+"natal.br",
+"net.br",
+"niteroi.br",
+"*.nom.br",
+"not.br",
+"ntr.br",
+"odo.br",
+"ong.br",
+"org.br",
+"osasco.br",
+"palmas.br",
+"poa.br",
+"ppg.br",
+"pro.br",
+"psc.br",
+"psi.br",
+"pvh.br",
+"qsl.br",
+"radio.br",
+"rec.br",
+"recife.br",
+"ribeirao.br",
+"rio.br",
+"riobranco.br",
+"riopreto.br",
+"salvador.br",
+"sampa.br",
+"santamaria.br",
+"santoandre.br",
+"saobernardo.br",
+"saogonca.br",
+"sjc.br",
+"slg.br",
+"slz.br",
+"sorocaba.br",
+"srv.br",
+"taxi.br",
+"tc.br",
+"teo.br",
+"the.br",
+"tmp.br",
+"trd.br",
+"tur.br",
+"tv.br",
+"udi.br",
+"vet.br",
+"vix.br",
+"vlog.br",
+"wiki.br",
+"zlg.br",
+"bs",
+"com.bs",
+"net.bs",
+"org.bs",
+"edu.bs",
+"gov.bs",
+"bt",
+"com.bt",
+"edu.bt",
+"gov.bt",
+"net.bt",
+"org.bt",
+"bv",
+"bw",
+"co.bw",
+"org.bw",
+"by",
+"gov.by",
+"mil.by",
+"com.by",
+"of.by",
+"bz",
+"com.bz",
+"net.bz",
+"org.bz",
+"edu.bz",
+"gov.bz",
+"ca",
+"ab.ca",
+"bc.ca",
+"mb.ca",
+"nb.ca",
+"nf.ca",
+"nl.ca",
+"ns.ca",
+"nt.ca",
+"nu.ca",
+"on.ca",
+"pe.ca",
+"qc.ca",
+"sk.ca",
+"yk.ca",
+"gc.ca",
+"cat",
+"cc",
+"cd",
+"gov.cd",
+"cf",
+"cg",
+"ch",
+"ci",
+"org.ci",
+"or.ci",
+"com.ci",
+"co.ci",
+"edu.ci",
+"ed.ci",
+"ac.ci",
+"net.ci",
+"go.ci",
+"asso.ci",
+"aéroport.ci",
+"int.ci",
+"presse.ci",
+"md.ci",
+"gouv.ci",
+"*.ck",
+"!www.ck",
+"cl",
+"aprendemas.cl",
+"co.cl",
+"gob.cl",
+"gov.cl",
+"mil.cl",
+"cm",
+"co.cm",
+"com.cm",
+"gov.cm",
+"net.cm",
+"cn",
+"ac.cn",
+"com.cn",
+"edu.cn",
+"gov.cn",
+"net.cn",
+"org.cn",
+"mil.cn",
+"公司.cn",
+"网络.cn",
+"網絡.cn",
+"ah.cn",
+"bj.cn",
+"cq.cn",
+"fj.cn",
+"gd.cn",
+"gs.cn",
+"gz.cn",
+"gx.cn",
+"ha.cn",
+"hb.cn",
+"he.cn",
+"hi.cn",
+"hl.cn",
+"hn.cn",
+"jl.cn",
+"js.cn",
+"jx.cn",
+"ln.cn",
+"nm.cn",
+"nx.cn",
+"qh.cn",
+"sc.cn",
+"sd.cn",
+"sh.cn",
+"sn.cn",
+"sx.cn",
+"tj.cn",
+"xj.cn",
+"xz.cn",
+"yn.cn",
+"zj.cn",
+"hk.cn",
+"mo.cn",
+"tw.cn",
+"co",
+"arts.co",
+"com.co",
+"edu.co",
+"firm.co",
+"gov.co",
+"info.co",
+"int.co",
+"mil.co",
+"net.co",
+"nom.co",
+"org.co",
+"rec.co",
+"web.co",
+"com",
+"coop",
+"cr",
+"ac.cr",
+"co.cr",
+"ed.cr",
+"fi.cr",
+"go.cr",
+"or.cr",
+"sa.cr",
+"cu",
+"com.cu",
+"edu.cu",
+"org.cu",
+"net.cu",
+"gov.cu",
+"inf.cu",
+"cv",
+"cw",
+"com.cw",
+"edu.cw",
+"net.cw",
+"org.cw",
+"cx",
+"gov.cx",
+"cy",
+"ac.cy",
+"biz.cy",
+"com.cy",
+"ekloges.cy",
+"gov.cy",
+"ltd.cy",
+"name.cy",
+"net.cy",
+"org.cy",
+"parliament.cy",
+"press.cy",
+"pro.cy",
+"tm.cy",
+"cz",
+"de",
+"dj",
+"dk",
+"dm",
+"com.dm",
+"net.dm",
+"org.dm",
+"edu.dm",
+"gov.dm",
+"do",
+"art.do",
+"com.do",
+"edu.do",
+"gob.do",
+"gov.do",
+"mil.do",
+"net.do",
+"org.do",
+"sld.do",
+"web.do",
+"dz",
+"com.dz",
+"org.dz",
+"net.dz",
+"gov.dz",
+"edu.dz",
+"asso.dz",
+"pol.dz",
+"art.dz",
+"ec",
+"com.ec",
+"info.ec",
+"net.ec",
+"fin.ec",
+"k12.ec",
+"med.ec",
+"pro.ec",
+"org.ec",
+"edu.ec",
+"gov.ec",
+"gob.ec",
+"mil.ec",
+"edu",
+"ee",
+"edu.ee",
+"gov.ee",
+"riik.ee",
+"lib.ee",
+"med.ee",
+"com.ee",
+"pri.ee",
+"aip.ee",
+"org.ee",
+"fie.ee",
+"eg",
+"com.eg",
+"edu.eg",
+"eun.eg",
+"gov.eg",
+"mil.eg",
+"name.eg",
+"net.eg",
+"org.eg",
+"sci.eg",
+"*.er",
+"es",
+"com.es",
+"nom.es",
+"org.es",
+"gob.es",
+"edu.es",
+"et",
+"com.et",
+"gov.et",
+"org.et",
+"edu.et",
+"biz.et",
+"name.et",
+"info.et",
+"net.et",
+"eu",
+"fi",
+"aland.fi",
+"fj",
+"ac.fj",
+"biz.fj",
+"com.fj",
+"gov.fj",
+"info.fj",
+"mil.fj",
+"name.fj",
+"net.fj",
+"org.fj",
+"pro.fj",
+"*.fk",
+"fm",
+"fo",
+"fr",
+"asso.fr",
+"com.fr",
+"gouv.fr",
+"nom.fr",
+"prd.fr",
+"tm.fr",
+"aeroport.fr",
+"avocat.fr",
+"avoues.fr",
+"cci.fr",
+"chambagri.fr",
+"chirurgiens-dentistes.fr",
+"experts-comptables.fr",
+"geometre-expert.fr",
+"greta.fr",
+"huissier-justice.fr",
+"medecin.fr",
+"notaires.fr",
+"pharmacien.fr",
+"port.fr",
+"veterinaire.fr",
+"ga",
+"gb",
+"gd",
+"ge",
+"com.ge",
+"edu.ge",
+"gov.ge",
+"org.ge",
+"mil.ge",
+"net.ge",
+"pvt.ge",
+"gf",
+"gg",
+"co.gg",
+"net.gg",
+"org.gg",
+"gh",
+"com.gh",
+"edu.gh",
+"gov.gh",
+"org.gh",
+"mil.gh",
+"gi",
+"com.gi",
+"ltd.gi",
+"gov.gi",
+"mod.gi",
+"edu.gi",
+"org.gi",
+"gl",
+"co.gl",
+"com.gl",
+"edu.gl",
+"net.gl",
+"org.gl",
+"gm",
+"gn",
+"ac.gn",
+"com.gn",
+"edu.gn",
+"gov.gn",
+"org.gn",
+"net.gn",
+"gov",
+"gp",
+"com.gp",
+"net.gp",
+"mobi.gp",
+"edu.gp",
+"org.gp",
+"asso.gp",
+"gq",
+"gr",
+"com.gr",
+"edu.gr",
+"net.gr",
+"org.gr",
+"gov.gr",
+"gs",
+"gt",
+"com.gt",
+"edu.gt",
+"gob.gt",
+"ind.gt",
+"mil.gt",
+"net.gt",
+"org.gt",
+"gu",
+"com.gu",
+"edu.gu",
+"gov.gu",
+"guam.gu",
+"info.gu",
+"net.gu",
+"org.gu",
+"web.gu",
+"gw",
+"gy",
+"co.gy",
+"com.gy",
+"edu.gy",
+"gov.gy",
+"net.gy",
+"org.gy",
+"hk",
+"com.hk",
+"edu.hk",
+"gov.hk",
+"idv.hk",
+"net.hk",
+"org.hk",
+"公司.hk",
+"教育.hk",
+"敎育.hk",
+"政府.hk",
+"個人.hk",
+"个人.hk",
+"箇人.hk",
+"網络.hk",
+"网络.hk",
+"组織.hk",
+"網絡.hk",
+"网絡.hk",
+"组织.hk",
+"組織.hk",
+"組织.hk",
+"hm",
+"hn",
+"com.hn",
+"edu.hn",
+"org.hn",
+"net.hn",
+"mil.hn",
+"gob.hn",
+"hr",
+"iz.hr",
+"from.hr",
+"name.hr",
+"com.hr",
+"ht",
+"com.ht",
+"shop.ht",
+"firm.ht",
+"info.ht",
+"adult.ht",
+"net.ht",
+"pro.ht",
+"org.ht",
+"med.ht",
+"art.ht",
+"coop.ht",
+"pol.ht",
+"asso.ht",
+"edu.ht",
+"rel.ht",
+"gouv.ht",
+"perso.ht",
+"hu",
+"co.hu",
+"info.hu",
+"org.hu",
+"priv.hu",
+"sport.hu",
+"tm.hu",
+"2000.hu",
+"agrar.hu",
+"bolt.hu",
+"casino.hu",
+"city.hu",
+"erotica.hu",
+"erotika.hu",
+"film.hu",
+"forum.hu",
+"games.hu",
+"hotel.hu",
+"ingatlan.hu",
+"jogasz.hu",
+"konyvelo.hu",
+"lakas.hu",
+"media.hu",
+"news.hu",
+"reklam.hu",
+"sex.hu",
+"shop.hu",
+"suli.hu",
+"szex.hu",
+"tozsde.hu",
+"utazas.hu",
+"video.hu",
+"id",
+"ac.id",
+"biz.id",
+"co.id",
+"desa.id",
+"go.id",
+"mil.id",
+"my.id",
+"net.id",
+"or.id",
+"ponpes.id",
+"sch.id",
+"web.id",
+"ie",
+"gov.ie",
+"il",
+"ac.il",
+"co.il",
+"gov.il",
+"idf.il",
+"k12.il",
+"muni.il",
+"net.il",
+"org.il",
+"im",
+"ac.im",
+"co.im",
+"com.im",
+"ltd.co.im",
+"net.im",
+"org.im",
+"plc.co.im",
+"tt.im",
+"tv.im",
+"in",
+"co.in",
+"firm.in",
+"net.in",
+"org.in",
+"gen.in",
+"ind.in",
+"nic.in",
+"ac.in",
+"edu.in",
+"res.in",
+"gov.in",
+"mil.in",
+"info",
+"int",
+"eu.int",
+"io",
+"com.io",
+"iq",
+"gov.iq",
+"edu.iq",
+"mil.iq",
+"com.iq",
+"org.iq",
+"net.iq",
+"ir",
+"ac.ir",
+"co.ir",
+"gov.ir",
+"id.ir",
+"net.ir",
+"org.ir",
+"sch.ir",
+"ایران.ir",
+"ايران.ir",
+"is",
+"net.is",
+"com.is",
+"edu.is",
+"gov.is",
+"org.is",
+"int.is",
+"it",
+"gov.it",
+"edu.it",
+"abr.it",
+"abruzzo.it",
+"aosta-valley.it",
+"aostavalley.it",
+"bas.it",
+"basilicata.it",
+"cal.it",
+"calabria.it",
+"cam.it",
+"campania.it",
+"emilia-romagna.it",
+"emiliaromagna.it",
+"emr.it",
+"friuli-v-giulia.it",
+"friuli-ve-giulia.it",
+"friuli-vegiulia.it",
+"friuli-venezia-giulia.it",
+"friuli-veneziagiulia.it",
+"friuli-vgiulia.it",
+"friuliv-giulia.it",
+"friulive-giulia.it",
+"friulivegiulia.it",
+"friulivenezia-giulia.it",
+"friuliveneziagiulia.it",
+"friulivgiulia.it",
+"fvg.it",
+"laz.it",
+"lazio.it",
+"lig.it",
+"liguria.it",
+"lom.it",
+"lombardia.it",
+"lombardy.it",
+"lucania.it",
+"mar.it",
+"marche.it",
+"mol.it",
+"molise.it",
+"piedmont.it",
+"piemonte.it",
+"pmn.it",
+"pug.it",
+"puglia.it",
+"sar.it",
+"sardegna.it",
+"sardinia.it",
+"sic.it",
+"sicilia.it",
+"sicily.it",
+"taa.it",
+"tos.it",
+"toscana.it",
+"trentin-sud-tirol.it",
+"trentin-süd-tirol.it",
+"trentin-sudtirol.it",
+"trentin-südtirol.it",
+"trentin-sued-tirol.it",
+"trentin-suedtirol.it",
+"trentino-a-adige.it",
+"trentino-aadige.it",
+"trentino-alto-adige.it",
+"trentino-altoadige.it",
+"trentino-s-tirol.it",
+"trentino-stirol.it",
+"trentino-sud-tirol.it",
+"trentino-süd-tirol.it",
+"trentino-sudtirol.it",
+"trentino-südtirol.it",
+"trentino-sued-tirol.it",
+"trentino-suedtirol.it",
+"trentino.it",
+"trentinoa-adige.it",
+"trentinoaadige.it",
+"trentinoalto-adige.it",
+"trentinoaltoadige.it",
+"trentinos-tirol.it",
+"trentinostirol.it",
+"trentinosud-tirol.it",
+"trentinosüd-tirol.it",
+"trentinosudtirol.it",
+"trentinosüdtirol.it",
+"trentinosued-tirol.it",
+"trentinosuedtirol.it",
+"trentinsud-tirol.it",
+"trentinsüd-tirol.it",
+"trentinsudtirol.it",
+"trentinsüdtirol.it",
+"trentinsued-tirol.it",
+"trentinsuedtirol.it",
+"tuscany.it",
+"umb.it",
+"umbria.it",
+"val-d-aosta.it",
+"val-daosta.it",
+"vald-aosta.it",
+"valdaosta.it",
+"valle-aosta.it",
+"valle-d-aosta.it",
+"valle-daosta.it",
+"valleaosta.it",
+"valled-aosta.it",
+"valledaosta.it",
+"vallee-aoste.it",
+"vallée-aoste.it",
+"vallee-d-aoste.it",
+"vallée-d-aoste.it",
+"valleeaoste.it",
+"valléeaoste.it",
+"valleedaoste.it",
+"valléedaoste.it",
+"vao.it",
+"vda.it",
+"ven.it",
+"veneto.it",
+"ag.it",
+"agrigento.it",
+"al.it",
+"alessandria.it",
+"alto-adige.it",
+"altoadige.it",
+"an.it",
+"ancona.it",
+"andria-barletta-trani.it",
+"andria-trani-barletta.it",
+"andriabarlettatrani.it",
+"andriatranibarletta.it",
+"ao.it",
+"aosta.it",
+"aoste.it",
+"ap.it",
+"aq.it",
+"aquila.it",
+"ar.it",
+"arezzo.it",
+"ascoli-piceno.it",
+"ascolipiceno.it",
+"asti.it",
+"at.it",
+"av.it",
+"avellino.it",
+"ba.it",
+"balsan-sudtirol.it",
+"balsan-südtirol.it",
+"balsan-suedtirol.it",
+"balsan.it",
+"bari.it",
+"barletta-trani-andria.it",
+"barlettatraniandria.it",
+"belluno.it",
+"benevento.it",
+"bergamo.it",
+"bg.it",
+"bi.it",
+"biella.it",
+"bl.it",
+"bn.it",
+"bo.it",
+"bologna.it",
+"bolzano-altoadige.it",
+"bolzano.it",
+"bozen-sudtirol.it",
+"bozen-südtirol.it",
+"bozen-suedtirol.it",
+"bozen.it",
+"br.it",
+"brescia.it",
+"brindisi.it",
+"bs.it",
+"bt.it",
+"bulsan-sudtirol.it",
+"bulsan-südtirol.it",
+"bulsan-suedtirol.it",
+"bulsan.it",
+"bz.it",
+"ca.it",
+"cagliari.it",
+"caltanissetta.it",
+"campidano-medio.it",
+"campidanomedio.it",
+"campobasso.it",
+"carbonia-iglesias.it",
+"carboniaiglesias.it",
+"carrara-massa.it",
+"carraramassa.it",
+"caserta.it",
+"catania.it",
+"catanzaro.it",
+"cb.it",
+"ce.it",
+"cesena-forli.it",
+"cesena-forlì.it",
+"cesenaforli.it",
+"cesenaforlì.it",
+"ch.it",
+"chieti.it",
+"ci.it",
+"cl.it",
+"cn.it",
+"co.it",
+"como.it",
+"cosenza.it",
+"cr.it",
+"cremona.it",
+"crotone.it",
+"cs.it",
+"ct.it",
+"cuneo.it",
+"cz.it",
+"dell-ogliastra.it",
+"dellogliastra.it",
+"en.it",
+"enna.it",
+"fc.it",
+"fe.it",
+"fermo.it",
+"ferrara.it",
+"fg.it",
+"fi.it",
+"firenze.it",
+"florence.it",
+"fm.it",
+"foggia.it",
+"forli-cesena.it",
+"forlì-cesena.it",
+"forlicesena.it",
+"forlìcesena.it",
+"fr.it",
+"frosinone.it",
+"ge.it",
+"genoa.it",
+"genova.it",
+"go.it",
+"gorizia.it",
+"gr.it",
+"grosseto.it",
+"iglesias-carbonia.it",
+"iglesiascarbonia.it",
+"im.it",
+"imperia.it",
+"is.it",
+"isernia.it",
+"kr.it",
+"la-spezia.it",
+"laquila.it",
+"laspezia.it",
+"latina.it",
+"lc.it",
+"le.it",
+"lecce.it",
+"lecco.it",
+"li.it",
+"livorno.it",
+"lo.it",
+"lodi.it",
+"lt.it",
+"lu.it",
+"lucca.it",
+"macerata.it",
+"mantova.it",
+"massa-carrara.it",
+"massacarrara.it",
+"matera.it",
+"mb.it",
+"mc.it",
+"me.it",
+"medio-campidano.it",
+"mediocampidano.it",
+"messina.it",
+"mi.it",
+"milan.it",
+"milano.it",
+"mn.it",
+"mo.it",
+"modena.it",
+"monza-brianza.it",
+"monza-e-della-brianza.it",
+"monza.it",
+"monzabrianza.it",
+"monzaebrianza.it",
+"monzaedellabrianza.it",
+"ms.it",
+"mt.it",
+"na.it",
+"naples.it",
+"napoli.it",
+"no.it",
+"novara.it",
+"nu.it",
+"nuoro.it",
+"og.it",
+"ogliastra.it",
+"olbia-tempio.it",
+"olbiatempio.it",
+"or.it",
+"oristano.it",
+"ot.it",
+"pa.it",
+"padova.it",
+"padua.it",
+"palermo.it",
+"parma.it",
+"pavia.it",
+"pc.it",
+"pd.it",
+"pe.it",
+"perugia.it",
+"pesaro-urbino.it",
+"pesarourbino.it",
+"pescara.it",
+"pg.it",
+"pi.it",
+"piacenza.it",
+"pisa.it",
+"pistoia.it",
+"pn.it",
+"po.it",
+"pordenone.it",
+"potenza.it",
+"pr.it",
+"prato.it",
+"pt.it",
+"pu.it",
+"pv.it",
+"pz.it",
+"ra.it",
+"ragusa.it",
+"ravenna.it",
+"rc.it",
+"re.it",
+"reggio-calabria.it",
+"reggio-emilia.it",
+"reggiocalabria.it",
+"reggioemilia.it",
+"rg.it",
+"ri.it",
+"rieti.it",
+"rimini.it",
+"rm.it",
+"rn.it",
+"ro.it",
+"roma.it",
+"rome.it",
+"rovigo.it",
+"sa.it",
+"salerno.it",
+"sassari.it",
+"savona.it",
+"si.it",
+"siena.it",
+"siracusa.it",
+"so.it",
+"sondrio.it",
+"sp.it",
+"sr.it",
+"ss.it",
+"suedtirol.it",
+"südtirol.it",
+"sv.it",
+"ta.it",
+"taranto.it",
+"te.it",
+"tempio-olbia.it",
+"tempioolbia.it",
+"teramo.it",
+"terni.it",
+"tn.it",
+"to.it",
+"torino.it",
+"tp.it",
+"tr.it",
+"trani-andria-barletta.it",
+"trani-barletta-andria.it",
+"traniandriabarletta.it",
+"tranibarlettaandria.it",
+"trapani.it",
+"trento.it",
+"treviso.it",
+"trieste.it",
+"ts.it",
+"turin.it",
+"tv.it",
+"ud.it",
+"udine.it",
+"urbino-pesaro.it",
+"urbinopesaro.it",
+"va.it",
+"varese.it",
+"vb.it",
+"vc.it",
+"ve.it",
+"venezia.it",
+"venice.it",
+"verbania.it",
+"vercelli.it",
+"verona.it",
+"vi.it",
+"vibo-valentia.it",
+"vibovalentia.it",
+"vicenza.it",
+"viterbo.it",
+"vr.it",
+"vs.it",
+"vt.it",
+"vv.it",
+"je",
+"co.je",
+"net.je",
+"org.je",
+"*.jm",
+"jo",
+"com.jo",
+"org.jo",
+"net.jo",
+"edu.jo",
+"sch.jo",
+"gov.jo",
+"mil.jo",
+"name.jo",
+"jobs",
+"jp",
+"ac.jp",
+"ad.jp",
+"co.jp",
+"ed.jp",
+"go.jp",
+"gr.jp",
+"lg.jp",
+"ne.jp",
+"or.jp",
+"aichi.jp",
+"akita.jp",
+"aomori.jp",
+"chiba.jp",
+"ehime.jp",
+"fukui.jp",
+"fukuoka.jp",
+"fukushima.jp",
+"gifu.jp",
+"gunma.jp",
+"hiroshima.jp",
+"hokkaido.jp",
+"hyogo.jp",
+"ibaraki.jp",
+"ishikawa.jp",
+"iwate.jp",
+"kagawa.jp",
+"kagoshima.jp",
+"kanagawa.jp",
+"kochi.jp",
+"kumamoto.jp",
+"kyoto.jp",
+"mie.jp",
+"miyagi.jp",
+"miyazaki.jp",
+"nagano.jp",
+"nagasaki.jp",
+"nara.jp",
+"niigata.jp",
+"oita.jp",
+"okayama.jp",
+"okinawa.jp",
+"osaka.jp",
+"saga.jp",
+"saitama.jp",
+"shiga.jp",
+"shimane.jp",
+"shizuoka.jp",
+"tochigi.jp",
+"tokushima.jp",
+"tokyo.jp",
+"tottori.jp",
+"toyama.jp",
+"wakayama.jp",
+"yamagata.jp",
+"yamaguchi.jp",
+"yamanashi.jp",
+"栃木.jp",
+"愛知.jp",
+"愛媛.jp",
+"兵庫.jp",
+"熊本.jp",
+"茨城.jp",
+"北海道.jp",
+"千葉.jp",
+"和歌山.jp",
+"長崎.jp",
+"長野.jp",
+"新潟.jp",
+"青森.jp",
+"静岡.jp",
+"東京.jp",
+"石川.jp",
+"埼玉.jp",
+"三重.jp",
+"京都.jp",
+"佐賀.jp",
+"大分.jp",
+"大阪.jp",
+"奈良.jp",
+"宮城.jp",
+"宮崎.jp",
+"富山.jp",
+"山口.jp",
+"山形.jp",
+"山梨.jp",
+"岩手.jp",
+"岐阜.jp",
+"岡山.jp",
+"島根.jp",
+"広島.jp",
+"徳島.jp",
+"沖縄.jp",
+"滋賀.jp",
+"神奈川.jp",
+"福井.jp",
+"福岡.jp",
+"福島.jp",
+"秋田.jp",
+"群馬.jp",
+"香川.jp",
+"高知.jp",
+"鳥取.jp",
+"鹿児島.jp",
+"*.kawasaki.jp",
+"*.kitakyushu.jp",
+"*.kobe.jp",
+"*.nagoya.jp",
+"*.sapporo.jp",
+"*.sendai.jp",
+"*.yokohama.jp",
+"!city.kawasaki.jp",
+"!city.kitakyushu.jp",
+"!city.kobe.jp",
+"!city.nagoya.jp",
+"!city.sapporo.jp",
+"!city.sendai.jp",
+"!city.yokohama.jp",
+"aisai.aichi.jp",
+"ama.aichi.jp",
+"anjo.aichi.jp",
+"asuke.aichi.jp",
+"chiryu.aichi.jp",
+"chita.aichi.jp",
+"fuso.aichi.jp",
+"gamagori.aichi.jp",
+"handa.aichi.jp",
+"hazu.aichi.jp",
+"hekinan.aichi.jp",
+"higashiura.aichi.jp",
+"ichinomiya.aichi.jp",
+"inazawa.aichi.jp",
+"inuyama.aichi.jp",
+"isshiki.aichi.jp",
+"iwakura.aichi.jp",
+"kanie.aichi.jp",
+"kariya.aichi.jp",
+"kasugai.aichi.jp",
+"kira.aichi.jp",
+"kiyosu.aichi.jp",
+"komaki.aichi.jp",
+"konan.aichi.jp",
+"kota.aichi.jp",
+"mihama.aichi.jp",
+"miyoshi.aichi.jp",
+"nishio.aichi.jp",
+"nisshin.aichi.jp",
+"obu.aichi.jp",
+"oguchi.aichi.jp",
+"oharu.aichi.jp",
+"okazaki.aichi.jp",
+"owariasahi.aichi.jp",
+"seto.aichi.jp",
+"shikatsu.aichi.jp",
+"shinshiro.aichi.jp",
+"shitara.aichi.jp",
+"tahara.aichi.jp",
+"takahama.aichi.jp",
+"tobishima.aichi.jp",
+"toei.aichi.jp",
+"togo.aichi.jp",
+"tokai.aichi.jp",
+"tokoname.aichi.jp",
+"toyoake.aichi.jp",
+"toyohashi.aichi.jp",
+"toyokawa.aichi.jp",
+"toyone.aichi.jp",
+"toyota.aichi.jp",
+"tsushima.aichi.jp",
+"yatomi.aichi.jp",
+"akita.akita.jp",
+"daisen.akita.jp",
+"fujisato.akita.jp",
+"gojome.akita.jp",
+"hachirogata.akita.jp",
+"happou.akita.jp",
+"higashinaruse.akita.jp",
+"honjo.akita.jp",
+"honjyo.akita.jp",
+"ikawa.akita.jp",
+"kamikoani.akita.jp",
+"kamioka.akita.jp",
+"katagami.akita.jp",
+"kazuno.akita.jp",
+"kitaakita.akita.jp",
+"kosaka.akita.jp",
+"kyowa.akita.jp",
+"misato.akita.jp",
+"mitane.akita.jp",
+"moriyoshi.akita.jp",
+"nikaho.akita.jp",
+"noshiro.akita.jp",
+"odate.akita.jp",
+"oga.akita.jp",
+"ogata.akita.jp",
+"semboku.akita.jp",
+"yokote.akita.jp",
+"yurihonjo.akita.jp",
+"aomori.aomori.jp",
+"gonohe.aomori.jp",
+"hachinohe.aomori.jp",
+"hashikami.aomori.jp",
+"hiranai.aomori.jp",
+"hirosaki.aomori.jp",
+"itayanagi.aomori.jp",
+"kuroishi.aomori.jp",
+"misawa.aomori.jp",
+"mutsu.aomori.jp",
+"nakadomari.aomori.jp",
+"noheji.aomori.jp",
+"oirase.aomori.jp",
+"owani.aomori.jp",
+"rokunohe.aomori.jp",
+"sannohe.aomori.jp",
+"shichinohe.aomori.jp",
+"shingo.aomori.jp",
+"takko.aomori.jp",
+"towada.aomori.jp",
+"tsugaru.aomori.jp",
+"tsuruta.aomori.jp",
+"abiko.chiba.jp",
+"asahi.chiba.jp",
+"chonan.chiba.jp",
+"chosei.chiba.jp",
+"choshi.chiba.jp",
+"chuo.chiba.jp",
+"funabashi.chiba.jp",
+"futtsu.chiba.jp",
+"hanamigawa.chiba.jp",
+"ichihara.chiba.jp",
+"ichikawa.chiba.jp",
+"ichinomiya.chiba.jp",
+"inzai.chiba.jp",
+"isumi.chiba.jp",
+"kamagaya.chiba.jp",
+"kamogawa.chiba.jp",
+"kashiwa.chiba.jp",
+"katori.chiba.jp",
+"katsuura.chiba.jp",
+"kimitsu.chiba.jp",
+"kisarazu.chiba.jp",
+"kozaki.chiba.jp",
+"kujukuri.chiba.jp",
+"kyonan.chiba.jp",
+"matsudo.chiba.jp",
+"midori.chiba.jp",
+"mihama.chiba.jp",
+"minamiboso.chiba.jp",
+"mobara.chiba.jp",
+"mutsuzawa.chiba.jp",
+"nagara.chiba.jp",
+"nagareyama.chiba.jp",
+"narashino.chiba.jp",
+"narita.chiba.jp",
+"noda.chiba.jp",
+"oamishirasato.chiba.jp",
+"omigawa.chiba.jp",
+"onjuku.chiba.jp",
+"otaki.chiba.jp",
+"sakae.chiba.jp",
+"sakura.chiba.jp",
+"shimofusa.chiba.jp",
+"shirako.chiba.jp",
+"shiroi.chiba.jp",
+"shisui.chiba.jp",
+"sodegaura.chiba.jp",
+"sosa.chiba.jp",
+"tako.chiba.jp",
+"tateyama.chiba.jp",
+"togane.chiba.jp",
+"tohnosho.chiba.jp",
+"tomisato.chiba.jp",
+"urayasu.chiba.jp",
+"yachimata.chiba.jp",
+"yachiyo.chiba.jp",
+"yokaichiba.chiba.jp",
+"yokoshibahikari.chiba.jp",
+"yotsukaido.chiba.jp",
+"ainan.ehime.jp",
+"honai.ehime.jp",
+"ikata.ehime.jp",
+"imabari.ehime.jp",
+"iyo.ehime.jp",
+"kamijima.ehime.jp",
+"kihoku.ehime.jp",
+"kumakogen.ehime.jp",
+"masaki.ehime.jp",
+"matsuno.ehime.jp",
+"matsuyama.ehime.jp",
+"namikata.ehime.jp",
+"niihama.ehime.jp",
+"ozu.ehime.jp",
+"saijo.ehime.jp",
+"seiyo.ehime.jp",
+"shikokuchuo.ehime.jp",
+"tobe.ehime.jp",
+"toon.ehime.jp",
+"uchiko.ehime.jp",
+"uwajima.ehime.jp",
+"yawatahama.ehime.jp",
+"echizen.fukui.jp",
+"eiheiji.fukui.jp",
+"fukui.fukui.jp",
+"ikeda.fukui.jp",
+"katsuyama.fukui.jp",
+"mihama.fukui.jp",
+"minamiechizen.fukui.jp",
+"obama.fukui.jp",
+"ohi.fukui.jp",
+"ono.fukui.jp",
+"sabae.fukui.jp",
+"sakai.fukui.jp",
+"takahama.fukui.jp",
+"tsuruga.fukui.jp",
+"wakasa.fukui.jp",
+"ashiya.fukuoka.jp",
+"buzen.fukuoka.jp",
+"chikugo.fukuoka.jp",
+"chikuho.fukuoka.jp",
+"chikujo.fukuoka.jp",
+"chikushino.fukuoka.jp",
+"chikuzen.fukuoka.jp",
+"chuo.fukuoka.jp",
+"dazaifu.fukuoka.jp",
+"fukuchi.fukuoka.jp",
+"hakata.fukuoka.jp",
+"higashi.fukuoka.jp",
+"hirokawa.fukuoka.jp",
+"hisayama.fukuoka.jp",
+"iizuka.fukuoka.jp",
+"inatsuki.fukuoka.jp",
+"kaho.fukuoka.jp",
+"kasuga.fukuoka.jp",
+"kasuya.fukuoka.jp",
+"kawara.fukuoka.jp",
+"keisen.fukuoka.jp",
+"koga.fukuoka.jp",
+"kurate.fukuoka.jp",
+"kurogi.fukuoka.jp",
+"kurume.fukuoka.jp",
+"minami.fukuoka.jp",
+"miyako.fukuoka.jp",
+"miyama.fukuoka.jp",
+"miyawaka.fukuoka.jp",
+"mizumaki.fukuoka.jp",
+"munakata.fukuoka.jp",
+"nakagawa.fukuoka.jp",
+"nakama.fukuoka.jp",
+"nishi.fukuoka.jp",
+"nogata.fukuoka.jp",
+"ogori.fukuoka.jp",
+"okagaki.fukuoka.jp",
+"okawa.fukuoka.jp",
+"oki.fukuoka.jp",
+"omuta.fukuoka.jp",
+"onga.fukuoka.jp",
+"onojo.fukuoka.jp",
+"oto.fukuoka.jp",
+"saigawa.fukuoka.jp",
+"sasaguri.fukuoka.jp",
+"shingu.fukuoka.jp",
+"shinyoshitomi.fukuoka.jp",
+"shonai.fukuoka.jp",
+"soeda.fukuoka.jp",
+"sue.fukuoka.jp",
+"tachiarai.fukuoka.jp",
+"tagawa.fukuoka.jp",
+"takata.fukuoka.jp",
+"toho.fukuoka.jp",
+"toyotsu.fukuoka.jp",
+"tsuiki.fukuoka.jp",
+"ukiha.fukuoka.jp",
+"umi.fukuoka.jp",
+"usui.fukuoka.jp",
+"yamada.fukuoka.jp",
+"yame.fukuoka.jp",
+"yanagawa.fukuoka.jp",
+"yukuhashi.fukuoka.jp",
+"aizubange.fukushima.jp",
+"aizumisato.fukushima.jp",
+"aizuwakamatsu.fukushima.jp",
+"asakawa.fukushima.jp",
+"bandai.fukushima.jp",
+"date.fukushima.jp",
+"fukushima.fukushima.jp",
+"furudono.fukushima.jp",
+"futaba.fukushima.jp",
+"hanawa.fukushima.jp",
+"higashi.fukushima.jp",
+"hirata.fukushima.jp",
+"hirono.fukushima.jp",
+"iitate.fukushima.jp",
+"inawashiro.fukushima.jp",
+"ishikawa.fukushima.jp",
+"iwaki.fukushima.jp",
+"izumizaki.fukushima.jp",
+"kagamiishi.fukushima.jp",
+"kaneyama.fukushima.jp",
+"kawamata.fukushima.jp",
+"kitakata.fukushima.jp",
+"kitashiobara.fukushima.jp",
+"koori.fukushima.jp",
+"koriyama.fukushima.jp",
+"kunimi.fukushima.jp",
+"miharu.fukushima.jp",
+"mishima.fukushima.jp",
+"namie.fukushima.jp",
+"nango.fukushima.jp",
+"nishiaizu.fukushima.jp",
+"nishigo.fukushima.jp",
+"okuma.fukushima.jp",
+"omotego.fukushima.jp",
+"ono.fukushima.jp",
+"otama.fukushima.jp",
+"samegawa.fukushima.jp",
+"shimogo.fukushima.jp",
+"shirakawa.fukushima.jp",
+"showa.fukushima.jp",
+"soma.fukushima.jp",
+"sukagawa.fukushima.jp",
+"taishin.fukushima.jp",
+"tamakawa.fukushima.jp",
+"tanagura.fukushima.jp",
+"tenei.fukushima.jp",
+"yabuki.fukushima.jp",
+"yamato.fukushima.jp",
+"yamatsuri.fukushima.jp",
+"yanaizu.fukushima.jp",
+"yugawa.fukushima.jp",
+"anpachi.gifu.jp",
+"ena.gifu.jp",
+"gifu.gifu.jp",
+"ginan.gifu.jp",
+"godo.gifu.jp",
+"gujo.gifu.jp",
+"hashima.gifu.jp",
+"hichiso.gifu.jp",
+"hida.gifu.jp",
+"higashishirakawa.gifu.jp",
+"ibigawa.gifu.jp",
+"ikeda.gifu.jp",
+"kakamigahara.gifu.jp",
+"kani.gifu.jp",
+"kasahara.gifu.jp",
+"kasamatsu.gifu.jp",
+"kawaue.gifu.jp",
+"kitagata.gifu.jp",
+"mino.gifu.jp",
+"minokamo.gifu.jp",
+"mitake.gifu.jp",
+"mizunami.gifu.jp",
+"motosu.gifu.jp",
+"nakatsugawa.gifu.jp",
+"ogaki.gifu.jp",
+"sakahogi.gifu.jp",
+"seki.gifu.jp",
+"sekigahara.gifu.jp",
+"shirakawa.gifu.jp",
+"tajimi.gifu.jp",
+"takayama.gifu.jp",
+"tarui.gifu.jp",
+"toki.gifu.jp",
+"tomika.gifu.jp",
+"wanouchi.gifu.jp",
+"yamagata.gifu.jp",
+"yaotsu.gifu.jp",
+"yoro.gifu.jp",
+"annaka.gunma.jp",
+"chiyoda.gunma.jp",
+"fujioka.gunma.jp",
+"higashiagatsuma.gunma.jp",
+"isesaki.gunma.jp",
+"itakura.gunma.jp",
+"kanna.gunma.jp",
+"kanra.gunma.jp",
+"katashina.gunma.jp",
+"kawaba.gunma.jp",
+"kiryu.gunma.jp",
+"kusatsu.gunma.jp",
+"maebashi.gunma.jp",
+"meiwa.gunma.jp",
+"midori.gunma.jp",
+"minakami.gunma.jp",
+"naganohara.gunma.jp",
+"nakanojo.gunma.jp",
+"nanmoku.gunma.jp",
+"numata.gunma.jp",
+"oizumi.gunma.jp",
+"ora.gunma.jp",
+"ota.gunma.jp",
+"shibukawa.gunma.jp",
+"shimonita.gunma.jp",
+"shinto.gunma.jp",
+"showa.gunma.jp",
+"takasaki.gunma.jp",
+"takayama.gunma.jp",
+"tamamura.gunma.jp",
+"tatebayashi.gunma.jp",
+"tomioka.gunma.jp",
+"tsukiyono.gunma.jp",
+"tsumagoi.gunma.jp",
+"ueno.gunma.jp",
+"yoshioka.gunma.jp",
+"asaminami.hiroshima.jp",
+"daiwa.hiroshima.jp",
+"etajima.hiroshima.jp",
+"fuchu.hiroshima.jp",
+"fukuyama.hiroshima.jp",
+"hatsukaichi.hiroshima.jp",
+"higashihiroshima.hiroshima.jp",
+"hongo.hiroshima.jp",
+"jinsekikogen.hiroshima.jp",
+"kaita.hiroshima.jp",
+"kui.hiroshima.jp",
+"kumano.hiroshima.jp",
+"kure.hiroshima.jp",
+"mihara.hiroshima.jp",
+"miyoshi.hiroshima.jp",
+"naka.hiroshima.jp",
+"onomichi.hiroshima.jp",
+"osakikamijima.hiroshima.jp",
+"otake.hiroshima.jp",
+"saka.hiroshima.jp",
+"sera.hiroshima.jp",
+"seranishi.hiroshima.jp",
+"shinichi.hiroshima.jp",
+"shobara.hiroshima.jp",
+"takehara.hiroshima.jp",
+"abashiri.hokkaido.jp",
+"abira.hokkaido.jp",
+"aibetsu.hokkaido.jp",
+"akabira.hokkaido.jp",
+"akkeshi.hokkaido.jp",
+"asahikawa.hokkaido.jp",
+"ashibetsu.hokkaido.jp",
+"ashoro.hokkaido.jp",
+"assabu.hokkaido.jp",
+"atsuma.hokkaido.jp",
+"bibai.hokkaido.jp",
+"biei.hokkaido.jp",
+"bifuka.hokkaido.jp",
+"bihoro.hokkaido.jp",
+"biratori.hokkaido.jp",
+"chippubetsu.hokkaido.jp",
+"chitose.hokkaido.jp",
+"date.hokkaido.jp",
+"ebetsu.hokkaido.jp",
+"embetsu.hokkaido.jp",
+"eniwa.hokkaido.jp",
+"erimo.hokkaido.jp",
+"esan.hokkaido.jp",
+"esashi.hokkaido.jp",
+"fukagawa.hokkaido.jp",
+"fukushima.hokkaido.jp",
+"furano.hokkaido.jp",
+"furubira.hokkaido.jp",
+"haboro.hokkaido.jp",
+"hakodate.hokkaido.jp",
+"hamatonbetsu.hokkaido.jp",
+"hidaka.hokkaido.jp",
+"higashikagura.hokkaido.jp",
+"higashikawa.hokkaido.jp",
+"hiroo.hokkaido.jp",
+"hokuryu.hokkaido.jp",
+"hokuto.hokkaido.jp",
+"honbetsu.hokkaido.jp",
+"horokanai.hokkaido.jp",
+"horonobe.hokkaido.jp",
+"ikeda.hokkaido.jp",
+"imakane.hokkaido.jp",
+"ishikari.hokkaido.jp",
+"iwamizawa.hokkaido.jp",
+"iwanai.hokkaido.jp",
+"kamifurano.hokkaido.jp",
+"kamikawa.hokkaido.jp",
+"kamishihoro.hokkaido.jp",
+"kamisunagawa.hokkaido.jp",
+"kamoenai.hokkaido.jp",
+"kayabe.hokkaido.jp",
+"kembuchi.hokkaido.jp",
+"kikonai.hokkaido.jp",
+"kimobetsu.hokkaido.jp",
+"kitahiroshima.hokkaido.jp",
+"kitami.hokkaido.jp",
+"kiyosato.hokkaido.jp",
+"koshimizu.hokkaido.jp",
+"kunneppu.hokkaido.jp",
+"kuriyama.hokkaido.jp",
+"kuromatsunai.hokkaido.jp",
+"kushiro.hokkaido.jp",
+"kutchan.hokkaido.jp",
+"kyowa.hokkaido.jp",
+"mashike.hokkaido.jp",
+"matsumae.hokkaido.jp",
+"mikasa.hokkaido.jp",
+"minamifurano.hokkaido.jp",
+"mombetsu.hokkaido.jp",
+"moseushi.hokkaido.jp",
+"mukawa.hokkaido.jp",
+"muroran.hokkaido.jp",
+"naie.hokkaido.jp",
+"nakagawa.hokkaido.jp",
+"nakasatsunai.hokkaido.jp",
+"nakatombetsu.hokkaido.jp",
+"nanae.hokkaido.jp",
+"nanporo.hokkaido.jp",
+"nayoro.hokkaido.jp",
+"nemuro.hokkaido.jp",
+"niikappu.hokkaido.jp",
+"niki.hokkaido.jp",
+"nishiokoppe.hokkaido.jp",
+"noboribetsu.hokkaido.jp",
+"numata.hokkaido.jp",
+"obihiro.hokkaido.jp",
+"obira.hokkaido.jp",
+"oketo.hokkaido.jp",
+"okoppe.hokkaido.jp",
+"otaru.hokkaido.jp",
+"otobe.hokkaido.jp",
+"otofuke.hokkaido.jp",
+"otoineppu.hokkaido.jp",
+"oumu.hokkaido.jp",
+"ozora.hokkaido.jp",
+"pippu.hokkaido.jp",
+"rankoshi.hokkaido.jp",
+"rebun.hokkaido.jp",
+"rikubetsu.hokkaido.jp",
+"rishiri.hokkaido.jp",
+"rishirifuji.hokkaido.jp",
+"saroma.hokkaido.jp",
+"sarufutsu.hokkaido.jp",
+"shakotan.hokkaido.jp",
+"shari.hokkaido.jp",
+"shibecha.hokkaido.jp",
+"shibetsu.hokkaido.jp",
+"shikabe.hokkaido.jp",
+"shikaoi.hokkaido.jp",
+"shimamaki.hokkaido.jp",
+"shimizu.hokkaido.jp",
+"shimokawa.hokkaido.jp",
+"shinshinotsu.hokkaido.jp",
+"shintoku.hokkaido.jp",
+"shiranuka.hokkaido.jp",
+"shiraoi.hokkaido.jp",
+"shiriuchi.hokkaido.jp",
+"sobetsu.hokkaido.jp",
+"sunagawa.hokkaido.jp",
+"taiki.hokkaido.jp",
+"takasu.hokkaido.jp",
+"takikawa.hokkaido.jp",
+"takinoue.hokkaido.jp",
+"teshikaga.hokkaido.jp",
+"tobetsu.hokkaido.jp",
+"tohma.hokkaido.jp",
+"tomakomai.hokkaido.jp",
+"tomari.hokkaido.jp",
+"toya.hokkaido.jp",
+"toyako.hokkaido.jp",
+"toyotomi.hokkaido.jp",
+"toyoura.hokkaido.jp",
+"tsubetsu.hokkaido.jp",
+"tsukigata.hokkaido.jp",
+"urakawa.hokkaido.jp",
+"urausu.hokkaido.jp",
+"uryu.hokkaido.jp",
+"utashinai.hokkaido.jp",
+"wakkanai.hokkaido.jp",
+"wassamu.hokkaido.jp",
+"yakumo.hokkaido.jp",
+"yoichi.hokkaido.jp",
+"aioi.hyogo.jp",
+"akashi.hyogo.jp",
+"ako.hyogo.jp",
+"amagasaki.hyogo.jp",
+"aogaki.hyogo.jp",
+"asago.hyogo.jp",
+"ashiya.hyogo.jp",
+"awaji.hyogo.jp",
+"fukusaki.hyogo.jp",
+"goshiki.hyogo.jp",
+"harima.hyogo.jp",
+"himeji.hyogo.jp",
+"ichikawa.hyogo.jp",
+"inagawa.hyogo.jp",
+"itami.hyogo.jp",
+"kakogawa.hyogo.jp",
+"kamigori.hyogo.jp",
+"kamikawa.hyogo.jp",
+"kasai.hyogo.jp",
+"kasuga.hyogo.jp",
+"kawanishi.hyogo.jp",
+"miki.hyogo.jp",
+"minamiawaji.hyogo.jp",
+"nishinomiya.hyogo.jp",
+"nishiwaki.hyogo.jp",
+"ono.hyogo.jp",
+"sanda.hyogo.jp",
+"sannan.hyogo.jp",
+"sasayama.hyogo.jp",
+"sayo.hyogo.jp",
+"shingu.hyogo.jp",
+"shinonsen.hyogo.jp",
+"shiso.hyogo.jp",
+"sumoto.hyogo.jp",
+"taishi.hyogo.jp",
+"taka.hyogo.jp",
+"takarazuka.hyogo.jp",
+"takasago.hyogo.jp",
+"takino.hyogo.jp",
+"tamba.hyogo.jp",
+"tatsuno.hyogo.jp",
+"toyooka.hyogo.jp",
+"yabu.hyogo.jp",
+"yashiro.hyogo.jp",
+"yoka.hyogo.jp",
+"yokawa.hyogo.jp",
+"ami.ibaraki.jp",
+"asahi.ibaraki.jp",
+"bando.ibaraki.jp",
+"chikusei.ibaraki.jp",
+"daigo.ibaraki.jp",
+"fujishiro.ibaraki.jp",
+"hitachi.ibaraki.jp",
+"hitachinaka.ibaraki.jp",
+"hitachiomiya.ibaraki.jp",
+"hitachiota.ibaraki.jp",
+"ibaraki.ibaraki.jp",
+"ina.ibaraki.jp",
+"inashiki.ibaraki.jp",
+"itako.ibaraki.jp",
+"iwama.ibaraki.jp",
+"joso.ibaraki.jp",
+"kamisu.ibaraki.jp",
+"kasama.ibaraki.jp",
+"kashima.ibaraki.jp",
+"kasumigaura.ibaraki.jp",
+"koga.ibaraki.jp",
+"miho.ibaraki.jp",
+"mito.ibaraki.jp",
+"moriya.ibaraki.jp",
+"naka.ibaraki.jp",
+"namegata.ibaraki.jp",
+"oarai.ibaraki.jp",
+"ogawa.ibaraki.jp",
+"omitama.ibaraki.jp",
+"ryugasaki.ibaraki.jp",
+"sakai.ibaraki.jp",
+"sakuragawa.ibaraki.jp",
+"shimodate.ibaraki.jp",
+"shimotsuma.ibaraki.jp",
+"shirosato.ibaraki.jp",
+"sowa.ibaraki.jp",
+"suifu.ibaraki.jp",
+"takahagi.ibaraki.jp",
+"tamatsukuri.ibaraki.jp",
+"tokai.ibaraki.jp",
+"tomobe.ibaraki.jp",
+"tone.ibaraki.jp",
+"toride.ibaraki.jp",
+"tsuchiura.ibaraki.jp",
+"tsukuba.ibaraki.jp",
+"uchihara.ibaraki.jp",
+"ushiku.ibaraki.jp",
+"yachiyo.ibaraki.jp",
+"yamagata.ibaraki.jp",
+"yawara.ibaraki.jp",
+"yuki.ibaraki.jp",
+"anamizu.ishikawa.jp",
+"hakui.ishikawa.jp",
+"hakusan.ishikawa.jp",
+"kaga.ishikawa.jp",
+"kahoku.ishikawa.jp",
+"kanazawa.ishikawa.jp",
+"kawakita.ishikawa.jp",
+"komatsu.ishikawa.jp",
+"nakanoto.ishikawa.jp",
+"nanao.ishikawa.jp",
+"nomi.ishikawa.jp",
+"nonoichi.ishikawa.jp",
+"noto.ishikawa.jp",
+"shika.ishikawa.jp",
+"suzu.ishikawa.jp",
+"tsubata.ishikawa.jp",
+"tsurugi.ishikawa.jp",
+"uchinada.ishikawa.jp",
+"wajima.ishikawa.jp",
+"fudai.iwate.jp",
+"fujisawa.iwate.jp",
+"hanamaki.iwate.jp",
+"hiraizumi.iwate.jp",
+"hirono.iwate.jp",
+"ichinohe.iwate.jp",
+"ichinoseki.iwate.jp",
+"iwaizumi.iwate.jp",
+"iwate.iwate.jp",
+"joboji.iwate.jp",
+"kamaishi.iwate.jp",
+"kanegasaki.iwate.jp",
+"karumai.iwate.jp",
+"kawai.iwate.jp",
+"kitakami.iwate.jp",
+"kuji.iwate.jp",
+"kunohe.iwate.jp",
+"kuzumaki.iwate.jp",
+"miyako.iwate.jp",
+"mizusawa.iwate.jp",
+"morioka.iwate.jp",
+"ninohe.iwate.jp",
+"noda.iwate.jp",
+"ofunato.iwate.jp",
+"oshu.iwate.jp",
+"otsuchi.iwate.jp",
+"rikuzentakata.iwate.jp",
+"shiwa.iwate.jp",
+"shizukuishi.iwate.jp",
+"sumita.iwate.jp",
+"tanohata.iwate.jp",
+"tono.iwate.jp",
+"yahaba.iwate.jp",
+"yamada.iwate.jp",
+"ayagawa.kagawa.jp",
+"higashikagawa.kagawa.jp",
+"kanonji.kagawa.jp",
+"kotohira.kagawa.jp",
+"manno.kagawa.jp",
+"marugame.kagawa.jp",
+"mitoyo.kagawa.jp",
+"naoshima.kagawa.jp",
+"sanuki.kagawa.jp",
+"tadotsu.kagawa.jp",
+"takamatsu.kagawa.jp",
+"tonosho.kagawa.jp",
+"uchinomi.kagawa.jp",
+"utazu.kagawa.jp",
+"zentsuji.kagawa.jp",
+"akune.kagoshima.jp",
+"amami.kagoshima.jp",
+"hioki.kagoshima.jp",
+"isa.kagoshima.jp",
+"isen.kagoshima.jp",
+"izumi.kagoshima.jp",
+"kagoshima.kagoshima.jp",
+"kanoya.kagoshima.jp",
+"kawanabe.kagoshima.jp",
+"kinko.kagoshima.jp",
+"kouyama.kagoshima.jp",
+"makurazaki.kagoshima.jp",
+"matsumoto.kagoshima.jp",
+"minamitane.kagoshima.jp",
+"nakatane.kagoshima.jp",
+"nishinoomote.kagoshima.jp",
+"satsumasendai.kagoshima.jp",
+"soo.kagoshima.jp",
+"tarumizu.kagoshima.jp",
+"yusui.kagoshima.jp",
+"aikawa.kanagawa.jp",
+"atsugi.kanagawa.jp",
+"ayase.kanagawa.jp",
+"chigasaki.kanagawa.jp",
+"ebina.kanagawa.jp",
+"fujisawa.kanagawa.jp",
+"hadano.kanagawa.jp",
+"hakone.kanagawa.jp",
+"hiratsuka.kanagawa.jp",
+"isehara.kanagawa.jp",
+"kaisei.kanagawa.jp",
+"kamakura.kanagawa.jp",
+"kiyokawa.kanagawa.jp",
+"matsuda.kanagawa.jp",
+"minamiashigara.kanagawa.jp",
+"miura.kanagawa.jp",
+"nakai.kanagawa.jp",
+"ninomiya.kanagawa.jp",
+"odawara.kanagawa.jp",
+"oi.kanagawa.jp",
+"oiso.kanagawa.jp",
+"sagamihara.kanagawa.jp",
+"samukawa.kanagawa.jp",
+"tsukui.kanagawa.jp",
+"yamakita.kanagawa.jp",
+"yamato.kanagawa.jp",
+"yokosuka.kanagawa.jp",
+"yugawara.kanagawa.jp",
+"zama.kanagawa.jp",
+"zushi.kanagawa.jp",
+"aki.kochi.jp",
+"geisei.kochi.jp",
+"hidaka.kochi.jp",
+"higashitsuno.kochi.jp",
+"ino.kochi.jp",
+"kagami.kochi.jp",
+"kami.kochi.jp",
+"kitagawa.kochi.jp",
+"kochi.kochi.jp",
+"mihara.kochi.jp",
+"motoyama.kochi.jp",
+"muroto.kochi.jp",
+"nahari.kochi.jp",
+"nakamura.kochi.jp",
+"nankoku.kochi.jp",
+"nishitosa.kochi.jp",
+"niyodogawa.kochi.jp",
+"ochi.kochi.jp",
+"okawa.kochi.jp",
+"otoyo.kochi.jp",
+"otsuki.kochi.jp",
+"sakawa.kochi.jp",
+"sukumo.kochi.jp",
+"susaki.kochi.jp",
+"tosa.kochi.jp",
+"tosashimizu.kochi.jp",
+"toyo.kochi.jp",
+"tsuno.kochi.jp",
+"umaji.kochi.jp",
+"yasuda.kochi.jp",
+"yusuhara.kochi.jp",
+"amakusa.kumamoto.jp",
+"arao.kumamoto.jp",
+"aso.kumamoto.jp",
+"choyo.kumamoto.jp",
+"gyokuto.kumamoto.jp",
+"kamiamakusa.kumamoto.jp",
+"kikuchi.kumamoto.jp",
+"kumamoto.kumamoto.jp",
+"mashiki.kumamoto.jp",
+"mifune.kumamoto.jp",
+"minamata.kumamoto.jp",
+"minamioguni.kumamoto.jp",
+"nagasu.kumamoto.jp",
+"nishihara.kumamoto.jp",
+"oguni.kumamoto.jp",
+"ozu.kumamoto.jp",
+"sumoto.kumamoto.jp",
+"takamori.kumamoto.jp",
+"uki.kumamoto.jp",
+"uto.kumamoto.jp",
+"yamaga.kumamoto.jp",
+"yamato.kumamoto.jp",
+"yatsushiro.kumamoto.jp",
+"ayabe.kyoto.jp",
+"fukuchiyama.kyoto.jp",
+"higashiyama.kyoto.jp",
+"ide.kyoto.jp",
+"ine.kyoto.jp",
+"joyo.kyoto.jp",
+"kameoka.kyoto.jp",
+"kamo.kyoto.jp",
+"kita.kyoto.jp",
+"kizu.kyoto.jp",
+"kumiyama.kyoto.jp",
+"kyotamba.kyoto.jp",
+"kyotanabe.kyoto.jp",
+"kyotango.kyoto.jp",
+"maizuru.kyoto.jp",
+"minami.kyoto.jp",
+"minamiyamashiro.kyoto.jp",
+"miyazu.kyoto.jp",
+"muko.kyoto.jp",
+"nagaokakyo.kyoto.jp",
+"nakagyo.kyoto.jp",
+"nantan.kyoto.jp",
+"oyamazaki.kyoto.jp",
+"sakyo.kyoto.jp",
+"seika.kyoto.jp",
+"tanabe.kyoto.jp",
+"uji.kyoto.jp",
+"ujitawara.kyoto.jp",
+"wazuka.kyoto.jp",
+"yamashina.kyoto.jp",
+"yawata.kyoto.jp",
+"asahi.mie.jp",
+"inabe.mie.jp",
+"ise.mie.jp",
+"kameyama.mie.jp",
+"kawagoe.mie.jp",
+"kiho.mie.jp",
+"kisosaki.mie.jp",
+"kiwa.mie.jp",
+"komono.mie.jp",
+"kumano.mie.jp",
+"kuwana.mie.jp",
+"matsusaka.mie.jp",
+"meiwa.mie.jp",
+"mihama.mie.jp",
+"minamiise.mie.jp",
+"misugi.mie.jp",
+"miyama.mie.jp",
+"nabari.mie.jp",
+"shima.mie.jp",
+"suzuka.mie.jp",
+"tado.mie.jp",
+"taiki.mie.jp",
+"taki.mie.jp",
+"tamaki.mie.jp",
+"toba.mie.jp",
+"tsu.mie.jp",
+"udono.mie.jp",
+"ureshino.mie.jp",
+"watarai.mie.jp",
+"yokkaichi.mie.jp",
+"furukawa.miyagi.jp",
+"higashimatsushima.miyagi.jp",
+"ishinomaki.miyagi.jp",
+"iwanuma.miyagi.jp",
+"kakuda.miyagi.jp",
+"kami.miyagi.jp",
+"kawasaki.miyagi.jp",
+"marumori.miyagi.jp",
+"matsushima.miyagi.jp",
+"minamisanriku.miyagi.jp",
+"misato.miyagi.jp",
+"murata.miyagi.jp",
+"natori.miyagi.jp",
+"ogawara.miyagi.jp",
+"ohira.miyagi.jp",
+"onagawa.miyagi.jp",
+"osaki.miyagi.jp",
+"rifu.miyagi.jp",
+"semine.miyagi.jp",
+"shibata.miyagi.jp",
+"shichikashuku.miyagi.jp",
+"shikama.miyagi.jp",
+"shiogama.miyagi.jp",
+"shiroishi.miyagi.jp",
+"tagajo.miyagi.jp",
+"taiwa.miyagi.jp",
+"tome.miyagi.jp",
+"tomiya.miyagi.jp",
+"wakuya.miyagi.jp",
+"watari.miyagi.jp",
+"yamamoto.miyagi.jp",
+"zao.miyagi.jp",
+"aya.miyazaki.jp",
+"ebino.miyazaki.jp",
+"gokase.miyazaki.jp",
+"hyuga.miyazaki.jp",
+"kadogawa.miyazaki.jp",
+"kawaminami.miyazaki.jp",
+"kijo.miyazaki.jp",
+"kitagawa.miyazaki.jp",
+"kitakata.miyazaki.jp",
+"kitaura.miyazaki.jp",
+"kobayashi.miyazaki.jp",
+"kunitomi.miyazaki.jp",
+"kushima.miyazaki.jp",
+"mimata.miyazaki.jp",
+"miyakonojo.miyazaki.jp",
+"miyazaki.miyazaki.jp",
+"morotsuka.miyazaki.jp",
+"nichinan.miyazaki.jp",
+"nishimera.miyazaki.jp",
+"nobeoka.miyazaki.jp",
+"saito.miyazaki.jp",
+"shiiba.miyazaki.jp",
+"shintomi.miyazaki.jp",
+"takaharu.miyazaki.jp",
+"takanabe.miyazaki.jp",
+"takazaki.miyazaki.jp",
+"tsuno.miyazaki.jp",
+"achi.nagano.jp",
+"agematsu.nagano.jp",
+"anan.nagano.jp",
+"aoki.nagano.jp",
+"asahi.nagano.jp",
+"azumino.nagano.jp",
+"chikuhoku.nagano.jp",
+"chikuma.nagano.jp",
+"chino.nagano.jp",
+"fujimi.nagano.jp",
+"hakuba.nagano.jp",
+"hara.nagano.jp",
+"hiraya.nagano.jp",
+"iida.nagano.jp",
+"iijima.nagano.jp",
+"iiyama.nagano.jp",
+"iizuna.nagano.jp",
+"ikeda.nagano.jp",
+"ikusaka.nagano.jp",
+"ina.nagano.jp",
+"karuizawa.nagano.jp",
+"kawakami.nagano.jp",
+"kiso.nagano.jp",
+"kisofukushima.nagano.jp",
+"kitaaiki.nagano.jp",
+"komagane.nagano.jp",
+"komoro.nagano.jp",
+"matsukawa.nagano.jp",
+"matsumoto.nagano.jp",
+"miasa.nagano.jp",
+"minamiaiki.nagano.jp",
+"minamimaki.nagano.jp",
+"minamiminowa.nagano.jp",
+"minowa.nagano.jp",
+"miyada.nagano.jp",
+"miyota.nagano.jp",
+"mochizuki.nagano.jp",
+"nagano.nagano.jp",
+"nagawa.nagano.jp",
+"nagiso.nagano.jp",
+"nakagawa.nagano.jp",
+"nakano.nagano.jp",
+"nozawaonsen.nagano.jp",
+"obuse.nagano.jp",
+"ogawa.nagano.jp",
+"okaya.nagano.jp",
+"omachi.nagano.jp",
+"omi.nagano.jp",
+"ookuwa.nagano.jp",
+"ooshika.nagano.jp",
+"otaki.nagano.jp",
+"otari.nagano.jp",
+"sakae.nagano.jp",
+"sakaki.nagano.jp",
+"saku.nagano.jp",
+"sakuho.nagano.jp",
+"shimosuwa.nagano.jp",
+"shinanomachi.nagano.jp",
+"shiojiri.nagano.jp",
+"suwa.nagano.jp",
+"suzaka.nagano.jp",
+"takagi.nagano.jp",
+"takamori.nagano.jp",
+"takayama.nagano.jp",
+"tateshina.nagano.jp",
+"tatsuno.nagano.jp",
+"togakushi.nagano.jp",
+"togura.nagano.jp",
+"tomi.nagano.jp",
+"ueda.nagano.jp",
+"wada.nagano.jp",
+"yamagata.nagano.jp",
+"yamanouchi.nagano.jp",
+"yasaka.nagano.jp",
+"yasuoka.nagano.jp",
+"chijiwa.nagasaki.jp",
+"futsu.nagasaki.jp",
+"goto.nagasaki.jp",
+"hasami.nagasaki.jp",
+"hirado.nagasaki.jp",
+"iki.nagasaki.jp",
+"isahaya.nagasaki.jp",
+"kawatana.nagasaki.jp",
+"kuchinotsu.nagasaki.jp",
+"matsuura.nagasaki.jp",
+"nagasaki.nagasaki.jp",
+"obama.nagasaki.jp",
+"omura.nagasaki.jp",
+"oseto.nagasaki.jp",
+"saikai.nagasaki.jp",
+"sasebo.nagasaki.jp",
+"seihi.nagasaki.jp",
+"shimabara.nagasaki.jp",
+"shinkamigoto.nagasaki.jp",
+"togitsu.nagasaki.jp",
+"tsushima.nagasaki.jp",
+"unzen.nagasaki.jp",
+"ando.nara.jp",
+"gose.nara.jp",
+"heguri.nara.jp",
+"higashiyoshino.nara.jp",
+"ikaruga.nara.jp",
+"ikoma.nara.jp",
+"kamikitayama.nara.jp",
+"kanmaki.nara.jp",
+"kashiba.nara.jp",
+"kashihara.nara.jp",
+"katsuragi.nara.jp",
+"kawai.nara.jp",
+"kawakami.nara.jp",
+"kawanishi.nara.jp",
+"koryo.nara.jp",
+"kurotaki.nara.jp",
+"mitsue.nara.jp",
+"miyake.nara.jp",
+"nara.nara.jp",
+"nosegawa.nara.jp",
+"oji.nara.jp",
+"ouda.nara.jp",
+"oyodo.nara.jp",
+"sakurai.nara.jp",
+"sango.nara.jp",
+"shimoichi.nara.jp",
+"shimokitayama.nara.jp",
+"shinjo.nara.jp",
+"soni.nara.jp",
+"takatori.nara.jp",
+"tawaramoto.nara.jp",
+"tenkawa.nara.jp",
+"tenri.nara.jp",
+"uda.nara.jp",
+"yamatokoriyama.nara.jp",
+"yamatotakada.nara.jp",
+"yamazoe.nara.jp",
+"yoshino.nara.jp",
+"aga.niigata.jp",
+"agano.niigata.jp",
+"gosen.niigata.jp",
+"itoigawa.niigata.jp",
+"izumozaki.niigata.jp",
+"joetsu.niigata.jp",
+"kamo.niigata.jp",
+"kariwa.niigata.jp",
+"kashiwazaki.niigata.jp",
+"minamiuonuma.niigata.jp",
+"mitsuke.niigata.jp",
+"muika.niigata.jp",
+"murakami.niigata.jp",
+"myoko.niigata.jp",
+"nagaoka.niigata.jp",
+"niigata.niigata.jp",
+"ojiya.niigata.jp",
+"omi.niigata.jp",
+"sado.niigata.jp",
+"sanjo.niigata.jp",
+"seiro.niigata.jp",
+"seirou.niigata.jp",
+"sekikawa.niigata.jp",
+"shibata.niigata.jp",
+"tagami.niigata.jp",
+"tainai.niigata.jp",
+"tochio.niigata.jp",
+"tokamachi.niigata.jp",
+"tsubame.niigata.jp",
+"tsunan.niigata.jp",
+"uonuma.niigata.jp",
+"yahiko.niigata.jp",
+"yoita.niigata.jp",
+"yuzawa.niigata.jp",
+"beppu.oita.jp",
+"bungoono.oita.jp",
+"bungotakada.oita.jp",
+"hasama.oita.jp",
+"hiji.oita.jp",
+"himeshima.oita.jp",
+"hita.oita.jp",
+"kamitsue.oita.jp",
+"kokonoe.oita.jp",
+"kuju.oita.jp",
+"kunisaki.oita.jp",
+"kusu.oita.jp",
+"oita.oita.jp",
+"saiki.oita.jp",
+"taketa.oita.jp",
+"tsukumi.oita.jp",
+"usa.oita.jp",
+"usuki.oita.jp",
+"yufu.oita.jp",
+"akaiwa.okayama.jp",
+"asakuchi.okayama.jp",
+"bizen.okayama.jp",
+"hayashima.okayama.jp",
+"ibara.okayama.jp",
+"kagamino.okayama.jp",
+"kasaoka.okayama.jp",
+"kibichuo.okayama.jp",
+"kumenan.okayama.jp",
+"kurashiki.okayama.jp",
+"maniwa.okayama.jp",
+"misaki.okayama.jp",
+"nagi.okayama.jp",
+"niimi.okayama.jp",
+"nishiawakura.okayama.jp",
+"okayama.okayama.jp",
+"satosho.okayama.jp",
+"setouchi.okayama.jp",
+"shinjo.okayama.jp",
+"shoo.okayama.jp",
+"soja.okayama.jp",
+"takahashi.okayama.jp",
+"tamano.okayama.jp",
+"tsuyama.okayama.jp",
+"wake.okayama.jp",
+"yakage.okayama.jp",
+"aguni.okinawa.jp",
+"ginowan.okinawa.jp",
+"ginoza.okinawa.jp",
+"gushikami.okinawa.jp",
+"haebaru.okinawa.jp",
+"higashi.okinawa.jp",
+"hirara.okinawa.jp",
+"iheya.okinawa.jp",
+"ishigaki.okinawa.jp",
+"ishikawa.okinawa.jp",
+"itoman.okinawa.jp",
+"izena.okinawa.jp",
+"kadena.okinawa.jp",
+"kin.okinawa.jp",
+"kitadaito.okinawa.jp",
+"kitanakagusuku.okinawa.jp",
+"kumejima.okinawa.jp",
+"kunigami.okinawa.jp",
+"minamidaito.okinawa.jp",
+"motobu.okinawa.jp",
+"nago.okinawa.jp",
+"naha.okinawa.jp",
+"nakagusuku.okinawa.jp",
+"nakijin.okinawa.jp",
+"nanjo.okinawa.jp",
+"nishihara.okinawa.jp",
+"ogimi.okinawa.jp",
+"okinawa.okinawa.jp",
+"onna.okinawa.jp",
+"shimoji.okinawa.jp",
+"taketomi.okinawa.jp",
+"tarama.okinawa.jp",
+"tokashiki.okinawa.jp",
+"tomigusuku.okinawa.jp",
+"tonaki.okinawa.jp",
+"urasoe.okinawa.jp",
+"uruma.okinawa.jp",
+"yaese.okinawa.jp",
+"yomitan.okinawa.jp",
+"yonabaru.okinawa.jp",
+"yonaguni.okinawa.jp",
+"zamami.okinawa.jp",
+"abeno.osaka.jp",
+"chihayaakasaka.osaka.jp",
+"chuo.osaka.jp",
+"daito.osaka.jp",
+"fujiidera.osaka.jp",
+"habikino.osaka.jp",
+"hannan.osaka.jp",
+"higashiosaka.osaka.jp",
+"higashisumiyoshi.osaka.jp",
+"higashiyodogawa.osaka.jp",
+"hirakata.osaka.jp",
+"ibaraki.osaka.jp",
+"ikeda.osaka.jp",
+"izumi.osaka.jp",
+"izumiotsu.osaka.jp",
+"izumisano.osaka.jp",
+"kadoma.osaka.jp",
+"kaizuka.osaka.jp",
+"kanan.osaka.jp",
+"kashiwara.osaka.jp",
+"katano.osaka.jp",
+"kawachinagano.osaka.jp",
+"kishiwada.osaka.jp",
+"kita.osaka.jp",
+"kumatori.osaka.jp",
+"matsubara.osaka.jp",
+"minato.osaka.jp",
+"minoh.osaka.jp",
+"misaki.osaka.jp",
+"moriguchi.osaka.jp",
+"neyagawa.osaka.jp",
+"nishi.osaka.jp",
+"nose.osaka.jp",
+"osakasayama.osaka.jp",
+"sakai.osaka.jp",
+"sayama.osaka.jp",
+"sennan.osaka.jp",
+"settsu.osaka.jp",
+"shijonawate.osaka.jp",
+"shimamoto.osaka.jp",
+"suita.osaka.jp",
+"tadaoka.osaka.jp",
+"taishi.osaka.jp",
+"tajiri.osaka.jp",
+"takaishi.osaka.jp",
+"takatsuki.osaka.jp",
+"tondabayashi.osaka.jp",
+"toyonaka.osaka.jp",
+"toyono.osaka.jp",
+"yao.osaka.jp",
+"ariake.saga.jp",
+"arita.saga.jp",
+"fukudomi.saga.jp",
+"genkai.saga.jp",
+"hamatama.saga.jp",
+"hizen.saga.jp",
+"imari.saga.jp",
+"kamimine.saga.jp",
+"kanzaki.saga.jp",
+"karatsu.saga.jp",
+"kashima.saga.jp",
+"kitagata.saga.jp",
+"kitahata.saga.jp",
+"kiyama.saga.jp",
+"kouhoku.saga.jp",
+"kyuragi.saga.jp",
+"nishiarita.saga.jp",
+"ogi.saga.jp",
+"omachi.saga.jp",
+"ouchi.saga.jp",
+"saga.saga.jp",
+"shiroishi.saga.jp",
+"taku.saga.jp",
+"tara.saga.jp",
+"tosu.saga.jp",
+"yoshinogari.saga.jp",
+"arakawa.saitama.jp",
+"asaka.saitama.jp",
+"chichibu.saitama.jp",
+"fujimi.saitama.jp",
+"fujimino.saitama.jp",
+"fukaya.saitama.jp",
+"hanno.saitama.jp",
+"hanyu.saitama.jp",
+"hasuda.saitama.jp",
+"hatogaya.saitama.jp",
+"hatoyama.saitama.jp",
+"hidaka.saitama.jp",
+"higashichichibu.saitama.jp",
+"higashimatsuyama.saitama.jp",
+"honjo.saitama.jp",
+"ina.saitama.jp",
+"iruma.saitama.jp",
+"iwatsuki.saitama.jp",
+"kamiizumi.saitama.jp",
+"kamikawa.saitama.jp",
+"kamisato.saitama.jp",
+"kasukabe.saitama.jp",
+"kawagoe.saitama.jp",
+"kawaguchi.saitama.jp",
+"kawajima.saitama.jp",
+"kazo.saitama.jp",
+"kitamoto.saitama.jp",
+"koshigaya.saitama.jp",
+"kounosu.saitama.jp",
+"kuki.saitama.jp",
+"kumagaya.saitama.jp",
+"matsubushi.saitama.jp",
+"minano.saitama.jp",
+"misato.saitama.jp",
+"miyashiro.saitama.jp",
+"miyoshi.saitama.jp",
+"moroyama.saitama.jp",
+"nagatoro.saitama.jp",
+"namegawa.saitama.jp",
+"niiza.saitama.jp",
+"ogano.saitama.jp",
+"ogawa.saitama.jp",
+"ogose.saitama.jp",
+"okegawa.saitama.jp",
+"omiya.saitama.jp",
+"otaki.saitama.jp",
+"ranzan.saitama.jp",
+"ryokami.saitama.jp",
+"saitama.saitama.jp",
+"sakado.saitama.jp",
+"satte.saitama.jp",
+"sayama.saitama.jp",
+"shiki.saitama.jp",
+"shiraoka.saitama.jp",
+"soka.saitama.jp",
+"sugito.saitama.jp",
+"toda.saitama.jp",
+"tokigawa.saitama.jp",
+"tokorozawa.saitama.jp",
+"tsurugashima.saitama.jp",
+"urawa.saitama.jp",
+"warabi.saitama.jp",
+"yashio.saitama.jp",
+"yokoze.saitama.jp",
+"yono.saitama.jp",
+"yorii.saitama.jp",
+"yoshida.saitama.jp",
+"yoshikawa.saitama.jp",
+"yoshimi.saitama.jp",
+"aisho.shiga.jp",
+"gamo.shiga.jp",
+"higashiomi.shiga.jp",
+"hikone.shiga.jp",
+"koka.shiga.jp",
+"konan.shiga.jp",
+"kosei.shiga.jp",
+"koto.shiga.jp",
+"kusatsu.shiga.jp",
+"maibara.shiga.jp",
+"moriyama.shiga.jp",
+"nagahama.shiga.jp",
+"nishiazai.shiga.jp",
+"notogawa.shiga.jp",
+"omihachiman.shiga.jp",
+"otsu.shiga.jp",
+"ritto.shiga.jp",
+"ryuoh.shiga.jp",
+"takashima.shiga.jp",
+"takatsuki.shiga.jp",
+"torahime.shiga.jp",
+"toyosato.shiga.jp",
+"yasu.shiga.jp",
+"akagi.shimane.jp",
+"ama.shimane.jp",
+"gotsu.shimane.jp",
+"hamada.shimane.jp",
+"higashiizumo.shimane.jp",
+"hikawa.shimane.jp",
+"hikimi.shimane.jp",
+"izumo.shimane.jp",
+"kakinoki.shimane.jp",
+"masuda.shimane.jp",
+"matsue.shimane.jp",
+"misato.shimane.jp",
+"nishinoshima.shimane.jp",
+"ohda.shimane.jp",
+"okinoshima.shimane.jp",
+"okuizumo.shimane.jp",
+"shimane.shimane.jp",
+"tamayu.shimane.jp",
+"tsuwano.shimane.jp",
+"unnan.shimane.jp",
+"yakumo.shimane.jp",
+"yasugi.shimane.jp",
+"yatsuka.shimane.jp",
+"arai.shizuoka.jp",
+"atami.shizuoka.jp",
+"fuji.shizuoka.jp",
+"fujieda.shizuoka.jp",
+"fujikawa.shizuoka.jp",
+"fujinomiya.shizuoka.jp",
+"fukuroi.shizuoka.jp",
+"gotemba.shizuoka.jp",
+"haibara.shizuoka.jp",
+"hamamatsu.shizuoka.jp",
+"higashiizu.shizuoka.jp",
+"ito.shizuoka.jp",
+"iwata.shizuoka.jp",
+"izu.shizuoka.jp",
+"izunokuni.shizuoka.jp",
+"kakegawa.shizuoka.jp",
+"kannami.shizuoka.jp",
+"kawanehon.shizuoka.jp",
+"kawazu.shizuoka.jp",
+"kikugawa.shizuoka.jp",
+"kosai.shizuoka.jp",
+"makinohara.shizuoka.jp",
+"matsuzaki.shizuoka.jp",
+"minamiizu.shizuoka.jp",
+"mishima.shizuoka.jp",
+"morimachi.shizuoka.jp",
+"nishiizu.shizuoka.jp",
+"numazu.shizuoka.jp",
+"omaezaki.shizuoka.jp",
+"shimada.shizuoka.jp",
+"shimizu.shizuoka.jp",
+"shimoda.shizuoka.jp",
+"shizuoka.shizuoka.jp",
+"susono.shizuoka.jp",
+"yaizu.shizuoka.jp",
+"yoshida.shizuoka.jp",
+"ashikaga.tochigi.jp",
+"bato.tochigi.jp",
+"haga.tochigi.jp",
+"ichikai.tochigi.jp",
+"iwafune.tochigi.jp",
+"kaminokawa.tochigi.jp",
+"kanuma.tochigi.jp",
+"karasuyama.tochigi.jp",
+"kuroiso.tochigi.jp",
+"mashiko.tochigi.jp",
+"mibu.tochigi.jp",
+"moka.tochigi.jp",
+"motegi.tochigi.jp",
+"nasu.tochigi.jp",
+"nasushiobara.tochigi.jp",
+"nikko.tochigi.jp",
+"nishikata.tochigi.jp",
+"nogi.tochigi.jp",
+"ohira.tochigi.jp",
+"ohtawara.tochigi.jp",
+"oyama.tochigi.jp",
+"sakura.tochigi.jp",
+"sano.tochigi.jp",
+"shimotsuke.tochigi.jp",
+"shioya.tochigi.jp",
+"takanezawa.tochigi.jp",
+"tochigi.tochigi.jp",
+"tsuga.tochigi.jp",
+"ujiie.tochigi.jp",
+"utsunomiya.tochigi.jp",
+"yaita.tochigi.jp",
+"aizumi.tokushima.jp",
+"anan.tokushima.jp",
+"ichiba.tokushima.jp",
+"itano.tokushima.jp",
+"kainan.tokushima.jp",
+"komatsushima.tokushima.jp",
+"matsushige.tokushima.jp",
+"mima.tokushima.jp",
+"minami.tokushima.jp",
+"miyoshi.tokushima.jp",
+"mugi.tokushima.jp",
+"nakagawa.tokushima.jp",
+"naruto.tokushima.jp",
+"sanagochi.tokushima.jp",
+"shishikui.tokushima.jp",
+"tokushima.tokushima.jp",
+"wajiki.tokushima.jp",
+"adachi.tokyo.jp",
+"akiruno.tokyo.jp",
+"akishima.tokyo.jp",
+"aogashima.tokyo.jp",
+"arakawa.tokyo.jp",
+"bunkyo.tokyo.jp",
+"chiyoda.tokyo.jp",
+"chofu.tokyo.jp",
+"chuo.tokyo.jp",
+"edogawa.tokyo.jp",
+"fuchu.tokyo.jp",
+"fussa.tokyo.jp",
+"hachijo.tokyo.jp",
+"hachioji.tokyo.jp",
+"hamura.tokyo.jp",
+"higashikurume.tokyo.jp",
+"higashimurayama.tokyo.jp",
+"higashiyamato.tokyo.jp",
+"hino.tokyo.jp",
+"hinode.tokyo.jp",
+"hinohara.tokyo.jp",
+"inagi.tokyo.jp",
+"itabashi.tokyo.jp",
+"katsushika.tokyo.jp",
+"kita.tokyo.jp",
+"kiyose.tokyo.jp",
+"kodaira.tokyo.jp",
+"koganei.tokyo.jp",
+"kokubunji.tokyo.jp",
+"komae.tokyo.jp",
+"koto.tokyo.jp",
+"kouzushima.tokyo.jp",
+"kunitachi.tokyo.jp",
+"machida.tokyo.jp",
+"meguro.tokyo.jp",
+"minato.tokyo.jp",
+"mitaka.tokyo.jp",
+"mizuho.tokyo.jp",
+"musashimurayama.tokyo.jp",
+"musashino.tokyo.jp",
+"nakano.tokyo.jp",
+"nerima.tokyo.jp",
+"ogasawara.tokyo.jp",
+"okutama.tokyo.jp",
+"ome.tokyo.jp",
+"oshima.tokyo.jp",
+"ota.tokyo.jp",
+"setagaya.tokyo.jp",
+"shibuya.tokyo.jp",
+"shinagawa.tokyo.jp",
+"shinjuku.tokyo.jp",
+"suginami.tokyo.jp",
+"sumida.tokyo.jp",
+"tachikawa.tokyo.jp",
+"taito.tokyo.jp",
+"tama.tokyo.jp",
+"toshima.tokyo.jp",
+"chizu.tottori.jp",
+"hino.tottori.jp",
+"kawahara.tottori.jp",
+"koge.tottori.jp",
+"kotoura.tottori.jp",
+"misasa.tottori.jp",
+"nanbu.tottori.jp",
+"nichinan.tottori.jp",
+"sakaiminato.tottori.jp",
+"tottori.tottori.jp",
+"wakasa.tottori.jp",
+"yazu.tottori.jp",
+"yonago.tottori.jp",
+"asahi.toyama.jp",
+"fuchu.toyama.jp",
+"fukumitsu.toyama.jp",
+"funahashi.toyama.jp",
+"himi.toyama.jp",
+"imizu.toyama.jp",
+"inami.toyama.jp",
+"johana.toyama.jp",
+"kamiichi.toyama.jp",
+"kurobe.toyama.jp",
+"nakaniikawa.toyama.jp",
+"namerikawa.toyama.jp",
+"nanto.toyama.jp",
+"nyuzen.toyama.jp",
+"oyabe.toyama.jp",
+"taira.toyama.jp",
+"takaoka.toyama.jp",
+"tateyama.toyama.jp",
+"toga.toyama.jp",
+"tonami.toyama.jp",
+"toyama.toyama.jp",
+"unazuki.toyama.jp",
+"uozu.toyama.jp",
+"yamada.toyama.jp",
+"arida.wakayama.jp",
+"aridagawa.wakayama.jp",
+"gobo.wakayama.jp",
+"hashimoto.wakayama.jp",
+"hidaka.wakayama.jp",
+"hirogawa.wakayama.jp",
+"inami.wakayama.jp",
+"iwade.wakayama.jp",
+"kainan.wakayama.jp",
+"kamitonda.wakayama.jp",
+"katsuragi.wakayama.jp",
+"kimino.wakayama.jp",
+"kinokawa.wakayama.jp",
+"kitayama.wakayama.jp",
+"koya.wakayama.jp",
+"koza.wakayama.jp",
+"kozagawa.wakayama.jp",
+"kudoyama.wakayama.jp",
+"kushimoto.wakayama.jp",
+"mihama.wakayama.jp",
+"misato.wakayama.jp",
+"nachikatsuura.wakayama.jp",
+"shingu.wakayama.jp",
+"shirahama.wakayama.jp",
+"taiji.wakayama.jp",
+"tanabe.wakayama.jp",
+"wakayama.wakayama.jp",
+"yuasa.wakayama.jp",
+"yura.wakayama.jp",
+"asahi.yamagata.jp",
+"funagata.yamagata.jp",
+"higashine.yamagata.jp",
+"iide.yamagata.jp",
+"kahoku.yamagata.jp",
+"kaminoyama.yamagata.jp",
+"kaneyama.yamagata.jp",
+"kawanishi.yamagata.jp",
+"mamurogawa.yamagata.jp",
+"mikawa.yamagata.jp",
+"murayama.yamagata.jp",
+"nagai.yamagata.jp",
+"nakayama.yamagata.jp",
+"nanyo.yamagata.jp",
+"nishikawa.yamagata.jp",
+"obanazawa.yamagata.jp",
+"oe.yamagata.jp",
+"oguni.yamagata.jp",
+"ohkura.yamagata.jp",
+"oishida.yamagata.jp",
+"sagae.yamagata.jp",
+"sakata.yamagata.jp",
+"sakegawa.yamagata.jp",
+"shinjo.yamagata.jp",
+"shirataka.yamagata.jp",
+"shonai.yamagata.jp",
+"takahata.yamagata.jp",
+"tendo.yamagata.jp",
+"tozawa.yamagata.jp",
+"tsuruoka.yamagata.jp",
+"yamagata.yamagata.jp",
+"yamanobe.yamagata.jp",
+"yonezawa.yamagata.jp",
+"yuza.yamagata.jp",
+"abu.yamaguchi.jp",
+"hagi.yamaguchi.jp",
+"hikari.yamaguchi.jp",
+"hofu.yamaguchi.jp",
+"iwakuni.yamaguchi.jp",
+"kudamatsu.yamaguchi.jp",
+"mitou.yamaguchi.jp",
+"nagato.yamaguchi.jp",
+"oshima.yamaguchi.jp",
+"shimonoseki.yamaguchi.jp",
+"shunan.yamaguchi.jp",
+"tabuse.yamaguchi.jp",
+"tokuyama.yamaguchi.jp",
+"toyota.yamaguchi.jp",
+"ube.yamaguchi.jp",
+"yuu.yamaguchi.jp",
+"chuo.yamanashi.jp",
+"doshi.yamanashi.jp",
+"fuefuki.yamanashi.jp",
+"fujikawa.yamanashi.jp",
+"fujikawaguchiko.yamanashi.jp",
+"fujiyoshida.yamanashi.jp",
+"hayakawa.yamanashi.jp",
+"hokuto.yamanashi.jp",
+"ichikawamisato.yamanashi.jp",
+"kai.yamanashi.jp",
+"kofu.yamanashi.jp",
+"koshu.yamanashi.jp",
+"kosuge.yamanashi.jp",
+"minami-alps.yamanashi.jp",
+"minobu.yamanashi.jp",
+"nakamichi.yamanashi.jp",
+"nanbu.yamanashi.jp",
+"narusawa.yamanashi.jp",
+"nirasaki.yamanashi.jp",
+"nishikatsura.yamanashi.jp",
+"oshino.yamanashi.jp",
+"otsuki.yamanashi.jp",
+"showa.yamanashi.jp",
+"tabayama.yamanashi.jp",
+"tsuru.yamanashi.jp",
+"uenohara.yamanashi.jp",
+"yamanakako.yamanashi.jp",
+"yamanashi.yamanashi.jp",
+"ke",
+"ac.ke",
+"co.ke",
+"go.ke",
+"info.ke",
+"me.ke",
+"mobi.ke",
+"ne.ke",
+"or.ke",
+"sc.ke",
+"kg",
+"org.kg",
+"net.kg",
+"com.kg",
+"edu.kg",
+"gov.kg",
+"mil.kg",
+"*.kh",
+"ki",
+"edu.ki",
+"biz.ki",
+"net.ki",
+"org.ki",
+"gov.ki",
+"info.ki",
+"com.ki",
+"km",
+"org.km",
+"nom.km",
+"gov.km",
+"prd.km",
+"tm.km",
+"edu.km",
+"mil.km",
+"ass.km",
+"com.km",
+"coop.km",
+"asso.km",
+"presse.km",
+"medecin.km",
+"notaires.km",
+"pharmaciens.km",
+"veterinaire.km",
+"gouv.km",
+"kn",
+"net.kn",
+"org.kn",
+"edu.kn",
+"gov.kn",
+"kp",
+"com.kp",
+"edu.kp",
+"gov.kp",
+"org.kp",
+"rep.kp",
+"tra.kp",
+"kr",
+"ac.kr",
+"co.kr",
+"es.kr",
+"go.kr",
+"hs.kr",
+"kg.kr",
+"mil.kr",
+"ms.kr",
+"ne.kr",
+"or.kr",
+"pe.kr",
+"re.kr",
+"sc.kr",
+"busan.kr",
+"chungbuk.kr",
+"chungnam.kr",
+"daegu.kr",
+"daejeon.kr",
+"gangwon.kr",
+"gwangju.kr",
+"gyeongbuk.kr",
+"gyeonggi.kr",
+"gyeongnam.kr",
+"incheon.kr",
+"jeju.kr",
+"jeonbuk.kr",
+"jeonnam.kr",
+"seoul.kr",
+"ulsan.kr",
+"kw",
+"com.kw",
+"edu.kw",
+"emb.kw",
+"gov.kw",
+"ind.kw",
+"net.kw",
+"org.kw",
+"ky",
+"edu.ky",
+"gov.ky",
+"com.ky",
+"org.ky",
+"net.ky",
+"kz",
+"org.kz",
+"edu.kz",
+"net.kz",
+"gov.kz",
+"mil.kz",
+"com.kz",
+"la",
+"int.la",
+"net.la",
+"info.la",
+"edu.la",
+"gov.la",
+"per.la",
+"com.la",
+"org.la",
+"lb",
+"com.lb",
+"edu.lb",
+"gov.lb",
+"net.lb",
+"org.lb",
+"lc",
+"com.lc",
+"net.lc",
+"co.lc",
+"org.lc",
+"edu.lc",
+"gov.lc",
+"li",
+"lk",
+"gov.lk",
+"sch.lk",
+"net.lk",
+"int.lk",
+"com.lk",
+"org.lk",
+"edu.lk",
+"ngo.lk",
+"soc.lk",
+"web.lk",
+"ltd.lk",
+"assn.lk",
+"grp.lk",
+"hotel.lk",
+"ac.lk",
+"lr",
+"com.lr",
+"edu.lr",
+"gov.lr",
+"org.lr",
+"net.lr",
+"ls",
+"ac.ls",
+"biz.ls",
+"co.ls",
+"edu.ls",
+"gov.ls",
+"info.ls",
+"net.ls",
+"org.ls",
+"sc.ls",
+"lt",
+"gov.lt",
+"lu",
+"lv",
+"com.lv",
+"edu.lv",
+"gov.lv",
+"org.lv",
+"mil.lv",
+"id.lv",
+"net.lv",
+"asn.lv",
+"conf.lv",
+"ly",
+"com.ly",
+"net.ly",
+"gov.ly",
+"plc.ly",
+"edu.ly",
+"sch.ly",
+"med.ly",
+"org.ly",
+"id.ly",
+"ma",
+"co.ma",
+"net.ma",
+"gov.ma",
+"org.ma",
+"ac.ma",
+"press.ma",
+"mc",
+"tm.mc",
+"asso.mc",
+"md",
+"me",
+"co.me",
+"net.me",
+"org.me",
+"edu.me",
+"ac.me",
+"gov.me",
+"its.me",
+"priv.me",
+"mg",
+"org.mg",
+"nom.mg",
+"gov.mg",
+"prd.mg",
+"tm.mg",
+"edu.mg",
+"mil.mg",
+"com.mg",
+"co.mg",
+"mh",
+"mil",
+"mk",
+"com.mk",
+"org.mk",
+"net.mk",
+"edu.mk",
+"gov.mk",
+"inf.mk",
+"name.mk",
+"ml",
+"com.ml",
+"edu.ml",
+"gouv.ml",
+"gov.ml",
+"net.ml",
+"org.ml",
+"presse.ml",
+"*.mm",
+"mn",
+"gov.mn",
+"edu.mn",
+"org.mn",
+"mo",
+"com.mo",
+"net.mo",
+"org.mo",
+"edu.mo",
+"gov.mo",
+"mobi",
+"mp",
+"mq",
+"mr",
+"gov.mr",
+"ms",
+"com.ms",
+"edu.ms",
+"gov.ms",
+"net.ms",
+"org.ms",
+"mt",
+"com.mt",
+"edu.mt",
+"net.mt",
+"org.mt",
+"mu",
+"com.mu",
+"net.mu",
+"org.mu",
+"gov.mu",
+"ac.mu",
+"co.mu",
+"or.mu",
+"museum",
+"academy.museum",
+"agriculture.museum",
+"air.museum",
+"airguard.museum",
+"alabama.museum",
+"alaska.museum",
+"amber.museum",
+"ambulance.museum",
+"american.museum",
+"americana.museum",
+"americanantiques.museum",
+"americanart.museum",
+"amsterdam.museum",
+"and.museum",
+"annefrank.museum",
+"anthro.museum",
+"anthropology.museum",
+"antiques.museum",
+"aquarium.museum",
+"arboretum.museum",
+"archaeological.museum",
+"archaeology.museum",
+"architecture.museum",
+"art.museum",
+"artanddesign.museum",
+"artcenter.museum",
+"artdeco.museum",
+"arteducation.museum",
+"artgallery.museum",
+"arts.museum",
+"artsandcrafts.museum",
+"asmatart.museum",
+"assassination.museum",
+"assisi.museum",
+"association.museum",
+"astronomy.museum",
+"atlanta.museum",
+"austin.museum",
+"australia.museum",
+"automotive.museum",
+"aviation.museum",
+"axis.museum",
+"badajoz.museum",
+"baghdad.museum",
+"bahn.museum",
+"bale.museum",
+"baltimore.museum",
+"barcelona.museum",
+"baseball.museum",
+"basel.museum",
+"baths.museum",
+"bauern.museum",
+"beauxarts.museum",
+"beeldengeluid.museum",
+"bellevue.museum",
+"bergbau.museum",
+"berkeley.museum",
+"berlin.museum",
+"bern.museum",
+"bible.museum",
+"bilbao.museum",
+"bill.museum",
+"birdart.museum",
+"birthplace.museum",
+"bonn.museum",
+"boston.museum",
+"botanical.museum",
+"botanicalgarden.museum",
+"botanicgarden.museum",
+"botany.museum",
+"brandywinevalley.museum",
+"brasil.museum",
+"bristol.museum",
+"british.museum",
+"britishcolumbia.museum",
+"broadcast.museum",
+"brunel.museum",
+"brussel.museum",
+"brussels.museum",
+"bruxelles.museum",
+"building.museum",
+"burghof.museum",
+"bus.museum",
+"bushey.museum",
+"cadaques.museum",
+"california.museum",
+"cambridge.museum",
+"can.museum",
+"canada.museum",
+"capebreton.museum",
+"carrier.museum",
+"cartoonart.museum",
+"casadelamoneda.museum",
+"castle.museum",
+"castres.museum",
+"celtic.museum",
+"center.museum",
+"chattanooga.museum",
+"cheltenham.museum",
+"chesapeakebay.museum",
+"chicago.museum",
+"children.museum",
+"childrens.museum",
+"childrensgarden.museum",
+"chiropractic.museum",
+"chocolate.museum",
+"christiansburg.museum",
+"cincinnati.museum",
+"cinema.museum",
+"circus.museum",
+"civilisation.museum",
+"civilization.museum",
+"civilwar.museum",
+"clinton.museum",
+"clock.museum",
+"coal.museum",
+"coastaldefence.museum",
+"cody.museum",
+"coldwar.museum",
+"collection.museum",
+"colonialwilliamsburg.museum",
+"coloradoplateau.museum",
+"columbia.museum",
+"columbus.museum",
+"communication.museum",
+"communications.museum",
+"community.museum",
+"computer.museum",
+"computerhistory.museum",
+"comunicações.museum",
+"contemporary.museum",
+"contemporaryart.museum",
+"convent.museum",
+"copenhagen.museum",
+"corporation.museum",
+"correios-e-telecomunicações.museum",
+"corvette.museum",
+"costume.museum",
+"countryestate.museum",
+"county.museum",
+"crafts.museum",
+"cranbrook.museum",
+"creation.museum",
+"cultural.museum",
+"culturalcenter.museum",
+"culture.museum",
+"cyber.museum",
+"cymru.museum",
+"dali.museum",
+"dallas.museum",
+"database.museum",
+"ddr.museum",
+"decorativearts.museum",
+"delaware.museum",
+"delmenhorst.museum",
+"denmark.museum",
+"depot.museum",
+"design.museum",
+"detroit.museum",
+"dinosaur.museum",
+"discovery.museum",
+"dolls.museum",
+"donostia.museum",
+"durham.museum",
+"eastafrica.museum",
+"eastcoast.museum",
+"education.museum",
+"educational.museum",
+"egyptian.museum",
+"eisenbahn.museum",
+"elburg.museum",
+"elvendrell.museum",
+"embroidery.museum",
+"encyclopedic.museum",
+"england.museum",
+"entomology.museum",
+"environment.museum",
+"environmentalconservation.museum",
+"epilepsy.museum",
+"essex.museum",
+"estate.museum",
+"ethnology.museum",
+"exeter.museum",
+"exhibition.museum",
+"family.museum",
+"farm.museum",
+"farmequipment.museum",
+"farmers.museum",
+"farmstead.museum",
+"field.museum",
+"figueres.museum",
+"filatelia.museum",
+"film.museum",
+"fineart.museum",
+"finearts.museum",
+"finland.museum",
+"flanders.museum",
+"florida.museum",
+"force.museum",
+"fortmissoula.museum",
+"fortworth.museum",
+"foundation.museum",
+"francaise.museum",
+"frankfurt.museum",
+"franziskaner.museum",
+"freemasonry.museum",
+"freiburg.museum",
+"fribourg.museum",
+"frog.museum",
+"fundacio.museum",
+"furniture.museum",
+"gallery.museum",
+"garden.museum",
+"gateway.museum",
+"geelvinck.museum",
+"gemological.museum",
+"geology.museum",
+"georgia.museum",
+"giessen.museum",
+"glas.museum",
+"glass.museum",
+"gorge.museum",
+"grandrapids.museum",
+"graz.museum",
+"guernsey.museum",
+"halloffame.museum",
+"hamburg.museum",
+"handson.museum",
+"harvestcelebration.museum",
+"hawaii.museum",
+"health.museum",
+"heimatunduhren.museum",
+"hellas.museum",
+"helsinki.museum",
+"hembygdsforbund.museum",
+"heritage.museum",
+"histoire.museum",
+"historical.museum",
+"historicalsociety.museum",
+"historichouses.museum",
+"historisch.museum",
+"historisches.museum",
+"history.museum",
+"historyofscience.museum",
+"horology.museum",
+"house.museum",
+"humanities.museum",
+"illustration.museum",
+"imageandsound.museum",
+"indian.museum",
+"indiana.museum",
+"indianapolis.museum",
+"indianmarket.museum",
+"intelligence.museum",
+"interactive.museum",
+"iraq.museum",
+"iron.museum",
+"isleofman.museum",
+"jamison.museum",
+"jefferson.museum",
+"jerusalem.museum",
+"jewelry.museum",
+"jewish.museum",
+"jewishart.museum",
+"jfk.museum",
+"journalism.museum",
+"judaica.museum",
+"judygarland.museum",
+"juedisches.museum",
+"juif.museum",
+"karate.museum",
+"karikatur.museum",
+"kids.museum",
+"koebenhavn.museum",
+"koeln.museum",
+"kunst.museum",
+"kunstsammlung.museum",
+"kunstunddesign.museum",
+"labor.museum",
+"labour.museum",
+"lajolla.museum",
+"lancashire.museum",
+"landes.museum",
+"lans.museum",
+"läns.museum",
+"larsson.museum",
+"lewismiller.museum",
+"lincoln.museum",
+"linz.museum",
+"living.museum",
+"livinghistory.museum",
+"localhistory.museum",
+"london.museum",
+"losangeles.museum",
+"louvre.museum",
+"loyalist.museum",
+"lucerne.museum",
+"luxembourg.museum",
+"luzern.museum",
+"mad.museum",
+"madrid.museum",
+"mallorca.museum",
+"manchester.museum",
+"mansion.museum",
+"mansions.museum",
+"manx.museum",
+"marburg.museum",
+"maritime.museum",
+"maritimo.museum",
+"maryland.museum",
+"marylhurst.museum",
+"media.museum",
+"medical.museum",
+"medizinhistorisches.museum",
+"meeres.museum",
+"memorial.museum",
+"mesaverde.museum",
+"michigan.museum",
+"midatlantic.museum",
+"military.museum",
+"mill.museum",
+"miners.museum",
+"mining.museum",
+"minnesota.museum",
+"missile.museum",
+"missoula.museum",
+"modern.museum",
+"moma.museum",
+"money.museum",
+"monmouth.museum",
+"monticello.museum",
+"montreal.museum",
+"moscow.museum",
+"motorcycle.museum",
+"muenchen.museum",
+"muenster.museum",
+"mulhouse.museum",
+"muncie.museum",
+"museet.museum",
+"museumcenter.museum",
+"museumvereniging.museum",
+"music.museum",
+"national.museum",
+"nationalfirearms.museum",
+"nationalheritage.museum",
+"nativeamerican.museum",
+"naturalhistory.museum",
+"naturalhistorymuseum.museum",
+"naturalsciences.museum",
+"nature.museum",
+"naturhistorisches.museum",
+"natuurwetenschappen.museum",
+"naumburg.museum",
+"naval.museum",
+"nebraska.museum",
+"neues.museum",
+"newhampshire.museum",
+"newjersey.museum",
+"newmexico.museum",
+"newport.museum",
+"newspaper.museum",
+"newyork.museum",
+"niepce.museum",
+"norfolk.museum",
+"north.museum",
+"nrw.museum",
+"nyc.museum",
+"nyny.museum",
+"oceanographic.museum",
+"oceanographique.museum",
+"omaha.museum",
+"online.museum",
+"ontario.museum",
+"openair.museum",
+"oregon.museum",
+"oregontrail.museum",
+"otago.museum",
+"oxford.museum",
+"pacific.museum",
+"paderborn.museum",
+"palace.museum",
+"paleo.museum",
+"palmsprings.museum",
+"panama.museum",
+"paris.museum",
+"pasadena.museum",
+"pharmacy.museum",
+"philadelphia.museum",
+"philadelphiaarea.museum",
+"philately.museum",
+"phoenix.museum",
+"photography.museum",
+"pilots.museum",
+"pittsburgh.museum",
+"planetarium.museum",
+"plantation.museum",
+"plants.museum",
+"plaza.museum",
+"portal.museum",
+"portland.museum",
+"portlligat.museum",
+"posts-and-telecommunications.museum",
+"preservation.museum",
+"presidio.museum",
+"press.museum",
+"project.museum",
+"public.museum",
+"pubol.museum",
+"quebec.museum",
+"railroad.museum",
+"railway.museum",
+"research.museum",
+"resistance.museum",
+"riodejaneiro.museum",
+"rochester.museum",
+"rockart.museum",
+"roma.museum",
+"russia.museum",
+"saintlouis.museum",
+"salem.museum",
+"salvadordali.museum",
+"salzburg.museum",
+"sandiego.museum",
+"sanfrancisco.museum",
+"santabarbara.museum",
+"santacruz.museum",
+"santafe.museum",
+"saskatchewan.museum",
+"satx.museum",
+"savannahga.museum",
+"schlesisches.museum",
+"schoenbrunn.museum",
+"schokoladen.museum",
+"school.museum",
+"schweiz.museum",
+"science.museum",
+"scienceandhistory.museum",
+"scienceandindustry.museum",
+"sciencecenter.museum",
+"sciencecenters.museum",
+"science-fiction.museum",
+"sciencehistory.museum",
+"sciences.museum",
+"sciencesnaturelles.museum",
+"scotland.museum",
+"seaport.museum",
+"settlement.museum",
+"settlers.museum",
+"shell.museum",
+"sherbrooke.museum",
+"sibenik.museum",
+"silk.museum",
+"ski.museum",
+"skole.museum",
+"society.museum",
+"sologne.museum",
+"soundandvision.museum",
+"southcarolina.museum",
+"southwest.museum",
+"space.museum",
+"spy.museum",
+"square.museum",
+"stadt.museum",
+"stalbans.museum",
+"starnberg.museum",
+"state.museum",
+"stateofdelaware.museum",
+"station.museum",
+"steam.museum",
+"steiermark.museum",
+"stjohn.museum",
+"stockholm.museum",
+"stpetersburg.museum",
+"stuttgart.museum",
+"suisse.museum",
+"surgeonshall.museum",
+"surrey.museum",
+"svizzera.museum",
+"sweden.museum",
+"sydney.museum",
+"tank.museum",
+"tcm.museum",
+"technology.museum",
+"telekommunikation.museum",
+"television.museum",
+"texas.museum",
+"textile.museum",
+"theater.museum",
+"time.museum",
+"timekeeping.museum",
+"topology.museum",
+"torino.museum",
+"touch.museum",
+"town.museum",
+"transport.museum",
+"tree.museum",
+"trolley.museum",
+"trust.museum",
+"trustee.museum",
+"uhren.museum",
+"ulm.museum",
+"undersea.museum",
+"university.museum",
+"usa.museum",
+"usantiques.museum",
+"usarts.museum",
+"uscountryestate.museum",
+"usculture.museum",
+"usdecorativearts.museum",
+"usgarden.museum",
+"ushistory.museum",
+"ushuaia.museum",
+"uslivinghistory.museum",
+"utah.museum",
+"uvic.museum",
+"valley.museum",
+"vantaa.museum",
+"versailles.museum",
+"viking.museum",
+"village.museum",
+"virginia.museum",
+"virtual.museum",
+"virtuel.museum",
+"vlaanderen.museum",
+"volkenkunde.museum",
+"wales.museum",
+"wallonie.museum",
+"war.museum",
+"washingtondc.museum",
+"watchandclock.museum",
+"watch-and-clock.museum",
+"western.museum",
+"westfalen.museum",
+"whaling.museum",
+"wildlife.museum",
+"williamsburg.museum",
+"windmill.museum",
+"workshop.museum",
+"york.museum",
+"yorkshire.museum",
+"yosemite.museum",
+"youth.museum",
+"zoological.museum",
+"zoology.museum",
+"ירושלים.museum",
+"иком.museum",
+"mv",
+"aero.mv",
+"biz.mv",
+"com.mv",
+"coop.mv",
+"edu.mv",
+"gov.mv",
+"info.mv",
+"int.mv",
+"mil.mv",
+"museum.mv",
+"name.mv",
+"net.mv",
+"org.mv",
+"pro.mv",
+"mw",
+"ac.mw",
+"biz.mw",
+"co.mw",
+"com.mw",
+"coop.mw",
+"edu.mw",
+"gov.mw",
+"int.mw",
+"museum.mw",
+"net.mw",
+"org.mw",
+"mx",
+"com.mx",
+"org.mx",
+"gob.mx",
+"edu.mx",
+"net.mx",
+"my",
+"com.my",
+"net.my",
+"org.my",
+"gov.my",
+"edu.my",
+"mil.my",
+"name.my",
+"mz",
+"ac.mz",
+"adv.mz",
+"co.mz",
+"edu.mz",
+"gov.mz",
+"mil.mz",
+"net.mz",
+"org.mz",
+"na",
+"info.na",
+"pro.na",
+"name.na",
+"school.na",
+"or.na",
+"dr.na",
+"us.na",
+"mx.na",
+"ca.na",
+"in.na",
+"cc.na",
+"tv.na",
+"ws.na",
+"mobi.na",
+"co.na",
+"com.na",
+"org.na",
+"name",
+"nc",
+"asso.nc",
+"nom.nc",
+"ne",
+"net",
+"nf",
+"com.nf",
+"net.nf",
+"per.nf",
+"rec.nf",
+"web.nf",
+"arts.nf",
+"firm.nf",
+"info.nf",
+"other.nf",
+"store.nf",
+"ng",
+"com.ng",
+"edu.ng",
+"gov.ng",
+"i.ng",
+"mil.ng",
+"mobi.ng",
+"name.ng",
+"net.ng",
+"org.ng",
+"sch.ng",
+"ni",
+"ac.ni",
+"biz.ni",
+"co.ni",
+"com.ni",
+"edu.ni",
+"gob.ni",
+"in.ni",
+"info.ni",
+"int.ni",
+"mil.ni",
+"net.ni",
+"nom.ni",
+"org.ni",
+"web.ni",
+"nl",
+"no",
+"fhs.no",
+"vgs.no",
+"fylkesbibl.no",
+"folkebibl.no",
+"museum.no",
+"idrett.no",
+"priv.no",
+"mil.no",
+"stat.no",
+"dep.no",
+"kommune.no",
+"herad.no",
+"aa.no",
+"ah.no",
+"bu.no",
+"fm.no",
+"hl.no",
+"hm.no",
+"jan-mayen.no",
+"mr.no",
+"nl.no",
+"nt.no",
+"of.no",
+"ol.no",
+"oslo.no",
+"rl.no",
+"sf.no",
+"st.no",
+"svalbard.no",
+"tm.no",
+"tr.no",
+"va.no",
+"vf.no",
+"gs.aa.no",
+"gs.ah.no",
+"gs.bu.no",
+"gs.fm.no",
+"gs.hl.no",
+"gs.hm.no",
+"gs.jan-mayen.no",
+"gs.mr.no",
+"gs.nl.no",
+"gs.nt.no",
+"gs.of.no",
+"gs.ol.no",
+"gs.oslo.no",
+"gs.rl.no",
+"gs.sf.no",
+"gs.st.no",
+"gs.svalbard.no",
+"gs.tm.no",
+"gs.tr.no",
+"gs.va.no",
+"gs.vf.no",
+"akrehamn.no",
+"åkrehamn.no",
+"algard.no",
+"ålgård.no",
+"arna.no",
+"brumunddal.no",
+"bryne.no",
+"bronnoysund.no",
+"brønnøysund.no",
+"drobak.no",
+"drøbak.no",
+"egersund.no",
+"fetsund.no",
+"floro.no",
+"florø.no",
+"fredrikstad.no",
+"hokksund.no",
+"honefoss.no",
+"hønefoss.no",
+"jessheim.no",
+"jorpeland.no",
+"jørpeland.no",
+"kirkenes.no",
+"kopervik.no",
+"krokstadelva.no",
+"langevag.no",
+"langevåg.no",
+"leirvik.no",
+"mjondalen.no",
+"mjøndalen.no",
+"mo-i-rana.no",
+"mosjoen.no",
+"mosjøen.no",
+"nesoddtangen.no",
+"orkanger.no",
+"osoyro.no",
+"osøyro.no",
+"raholt.no",
+"råholt.no",
+"sandnessjoen.no",
+"sandnessjøen.no",
+"skedsmokorset.no",
+"slattum.no",
+"spjelkavik.no",
+"stathelle.no",
+"stavern.no",
+"stjordalshalsen.no",
+"stjørdalshalsen.no",
+"tananger.no",
+"tranby.no",
+"vossevangen.no",
+"afjord.no",
+"åfjord.no",
+"agdenes.no",
+"al.no",
+"ål.no",
+"alesund.no",
+"ålesund.no",
+"alstahaug.no",
+"alta.no",
+"áltá.no",
+"alaheadju.no",
+"álaheadju.no",
+"alvdal.no",
+"amli.no",
+"åmli.no",
+"amot.no",
+"åmot.no",
+"andebu.no",
+"andoy.no",
+"andøy.no",
+"andasuolo.no",
+"ardal.no",
+"årdal.no",
+"aremark.no",
+"arendal.no",
+"ås.no",
+"aseral.no",
+"åseral.no",
+"asker.no",
+"askim.no",
+"askvoll.no",
+"askoy.no",
+"askøy.no",
+"asnes.no",
+"åsnes.no",
+"audnedaln.no",
+"aukra.no",
+"aure.no",
+"aurland.no",
+"aurskog-holand.no",
+"aurskog-høland.no",
+"austevoll.no",
+"austrheim.no",
+"averoy.no",
+"averøy.no",
+"balestrand.no",
+"ballangen.no",
+"balat.no",
+"bálát.no",
+"balsfjord.no",
+"bahccavuotna.no",
+"báhccavuotna.no",
+"bamble.no",
+"bardu.no",
+"beardu.no",
+"beiarn.no",
+"bajddar.no",
+"bájddar.no",
+"baidar.no",
+"báidár.no",
+"berg.no",
+"bergen.no",
+"berlevag.no",
+"berlevåg.no",
+"bearalvahki.no",
+"bearalváhki.no",
+"bindal.no",
+"birkenes.no",
+"bjarkoy.no",
+"bjarkøy.no",
+"bjerkreim.no",
+"bjugn.no",
+"bodo.no",
+"bodø.no",
+"badaddja.no",
+"bådåddjå.no",
+"budejju.no",
+"bokn.no",
+"bremanger.no",
+"bronnoy.no",
+"brønnøy.no",
+"bygland.no",
+"bykle.no",
+"barum.no",
+"bærum.no",
+"bo.telemark.no",
+"bø.telemark.no",
+"bo.nordland.no",
+"bø.nordland.no",
+"bievat.no",
+"bievát.no",
+"bomlo.no",
+"bømlo.no",
+"batsfjord.no",
+"båtsfjord.no",
+"bahcavuotna.no",
+"báhcavuotna.no",
+"dovre.no",
+"drammen.no",
+"drangedal.no",
+"dyroy.no",
+"dyrøy.no",
+"donna.no",
+"dønna.no",
+"eid.no",
+"eidfjord.no",
+"eidsberg.no",
+"eidskog.no",
+"eidsvoll.no",
+"eigersund.no",
+"elverum.no",
+"enebakk.no",
+"engerdal.no",
+"etne.no",
+"etnedal.no",
+"evenes.no",
+"evenassi.no",
+"evenášši.no",
+"evje-og-hornnes.no",
+"farsund.no",
+"fauske.no",
+"fuossko.no",
+"fuoisku.no",
+"fedje.no",
+"fet.no",
+"finnoy.no",
+"finnøy.no",
+"fitjar.no",
+"fjaler.no",
+"fjell.no",
+"flakstad.no",
+"flatanger.no",
+"flekkefjord.no",
+"flesberg.no",
+"flora.no",
+"fla.no",
+"flå.no",
+"folldal.no",
+"forsand.no",
+"fosnes.no",
+"frei.no",
+"frogn.no",
+"froland.no",
+"frosta.no",
+"frana.no",
+"fræna.no",
+"froya.no",
+"frøya.no",
+"fusa.no",
+"fyresdal.no",
+"forde.no",
+"førde.no",
+"gamvik.no",
+"gangaviika.no",
+"gáŋgaviika.no",
+"gaular.no",
+"gausdal.no",
+"gildeskal.no",
+"gildeskål.no",
+"giske.no",
+"gjemnes.no",
+"gjerdrum.no",
+"gjerstad.no",
+"gjesdal.no",
+"gjovik.no",
+"gjøvik.no",
+"gloppen.no",
+"gol.no",
+"gran.no",
+"grane.no",
+"granvin.no",
+"gratangen.no",
+"grimstad.no",
+"grong.no",
+"kraanghke.no",
+"kråanghke.no",
+"grue.no",
+"gulen.no",
+"hadsel.no",
+"halden.no",
+"halsa.no",
+"hamar.no",
+"hamaroy.no",
+"habmer.no",
+"hábmer.no",
+"hapmir.no",
+"hápmir.no",
+"hammerfest.no",
+"hammarfeasta.no",
+"hámmárfeasta.no",
+"haram.no",
+"hareid.no",
+"harstad.no",
+"hasvik.no",
+"aknoluokta.no",
+"ákŋoluokta.no",
+"hattfjelldal.no",
+"aarborte.no",
+"haugesund.no",
+"hemne.no",
+"hemnes.no",
+"hemsedal.no",
+"heroy.more-og-romsdal.no",
+"herøy.møre-og-romsdal.no",
+"heroy.nordland.no",
+"herøy.nordland.no",
+"hitra.no",
+"hjartdal.no",
+"hjelmeland.no",
+"hobol.no",
+"hobøl.no",
+"hof.no",
+"hol.no",
+"hole.no",
+"holmestrand.no",
+"holtalen.no",
+"holtålen.no",
+"hornindal.no",
+"horten.no",
+"hurdal.no",
+"hurum.no",
+"hvaler.no",
+"hyllestad.no",
+"hagebostad.no",
+"hægebostad.no",
+"hoyanger.no",
+"høyanger.no",
+"hoylandet.no",
+"høylandet.no",
+"ha.no",
+"hå.no",
+"ibestad.no",
+"inderoy.no",
+"inderøy.no",
+"iveland.no",
+"jevnaker.no",
+"jondal.no",
+"jolster.no",
+"jølster.no",
+"karasjok.no",
+"karasjohka.no",
+"kárášjohka.no",
+"karlsoy.no",
+"galsa.no",
+"gálsá.no",
+"karmoy.no",
+"karmøy.no",
+"kautokeino.no",
+"guovdageaidnu.no",
+"klepp.no",
+"klabu.no",
+"klæbu.no",
+"kongsberg.no",
+"kongsvinger.no",
+"kragero.no",
+"kragerø.no",
+"kristiansand.no",
+"kristiansund.no",
+"krodsherad.no",
+"krødsherad.no",
+"kvalsund.no",
+"rahkkeravju.no",
+"ráhkkerávju.no",
+"kvam.no",
+"kvinesdal.no",
+"kvinnherad.no",
+"kviteseid.no",
+"kvitsoy.no",
+"kvitsøy.no",
+"kvafjord.no",
+"kvæfjord.no",
+"giehtavuoatna.no",
+"kvanangen.no",
+"kvænangen.no",
+"navuotna.no",
+"návuotna.no",
+"kafjord.no",
+"kåfjord.no",
+"gaivuotna.no",
+"gáivuotna.no",
+"larvik.no",
+"lavangen.no",
+"lavagis.no",
+"loabat.no",
+"loabát.no",
+"lebesby.no",
+"davvesiida.no",
+"leikanger.no",
+"leirfjord.no",
+"leka.no",
+"leksvik.no",
+"lenvik.no",
+"leangaviika.no",
+"leaŋgaviika.no",
+"lesja.no",
+"levanger.no",
+"lier.no",
+"lierne.no",
+"lillehammer.no",
+"lillesand.no",
+"lindesnes.no",
+"lindas.no",
+"lindås.no",
+"lom.no",
+"loppa.no",
+"lahppi.no",
+"láhppi.no",
+"lund.no",
+"lunner.no",
+"luroy.no",
+"lurøy.no",
+"luster.no",
+"lyngdal.no",
+"lyngen.no",
+"ivgu.no",
+"lardal.no",
+"lerdal.no",
+"lærdal.no",
+"lodingen.no",
+"lødingen.no",
+"lorenskog.no",
+"lørenskog.no",
+"loten.no",
+"løten.no",
+"malvik.no",
+"masoy.no",
+"måsøy.no",
+"muosat.no",
+"muosát.no",
+"mandal.no",
+"marker.no",
+"marnardal.no",
+"masfjorden.no",
+"meland.no",
+"meldal.no",
+"melhus.no",
+"meloy.no",
+"meløy.no",
+"meraker.no",
+"meråker.no",
+"moareke.no",
+"moåreke.no",
+"midsund.no",
+"midtre-gauldal.no",
+"modalen.no",
+"modum.no",
+"molde.no",
+"moskenes.no",
+"moss.no",
+"mosvik.no",
+"malselv.no",
+"målselv.no",
+"malatvuopmi.no",
+"málatvuopmi.no",
+"namdalseid.no",
+"aejrie.no",
+"namsos.no",
+"namsskogan.no",
+"naamesjevuemie.no",
+"nååmesjevuemie.no",
+"laakesvuemie.no",
+"nannestad.no",
+"narvik.no",
+"narviika.no",
+"naustdal.no",
+"nedre-eiker.no",
+"nes.akershus.no",
+"nes.buskerud.no",
+"nesna.no",
+"nesodden.no",
+"nesseby.no",
+"unjarga.no",
+"unjárga.no",
+"nesset.no",
+"nissedal.no",
+"nittedal.no",
+"nord-aurdal.no",
+"nord-fron.no",
+"nord-odal.no",
+"norddal.no",
+"nordkapp.no",
+"davvenjarga.no",
+"davvenjárga.no",
+"nordre-land.no",
+"nordreisa.no",
+"raisa.no",
+"ráisa.no",
+"nore-og-uvdal.no",
+"notodden.no",
+"naroy.no",
+"nærøy.no",
+"notteroy.no",
+"nøtterøy.no",
+"odda.no",
+"oksnes.no",
+"øksnes.no",
+"oppdal.no",
+"oppegard.no",
+"oppegård.no",
+"orkdal.no",
+"orland.no",
+"ørland.no",
+"orskog.no",
+"ørskog.no",
+"orsta.no",
+"ørsta.no",
+"os.hedmark.no",
+"os.hordaland.no",
+"osen.no",
+"osteroy.no",
+"osterøy.no",
+"ostre-toten.no",
+"østre-toten.no",
+"overhalla.no",
+"ovre-eiker.no",
+"øvre-eiker.no",
+"oyer.no",
+"øyer.no",
+"oygarden.no",
+"øygarden.no",
+"oystre-slidre.no",
+"øystre-slidre.no",
+"porsanger.no",
+"porsangu.no",
+"porsáŋgu.no",
+"porsgrunn.no",
+"radoy.no",
+"radøy.no",
+"rakkestad.no",
+"rana.no",
+"ruovat.no",
+"randaberg.no",
+"rauma.no",
+"rendalen.no",
+"rennebu.no",
+"rennesoy.no",
+"rennesøy.no",
+"rindal.no",
+"ringebu.no",
+"ringerike.no",
+"ringsaker.no",
+"rissa.no",
+"risor.no",
+"risør.no",
+"roan.no",
+"rollag.no",
+"rygge.no",
+"ralingen.no",
+"rælingen.no",
+"rodoy.no",
+"rødøy.no",
+"romskog.no",
+"rømskog.no",
+"roros.no",
+"røros.no",
+"rost.no",
+"røst.no",
+"royken.no",
+"røyken.no",
+"royrvik.no",
+"røyrvik.no",
+"rade.no",
+"råde.no",
+"salangen.no",
+"siellak.no",
+"saltdal.no",
+"salat.no",
+"sálát.no",
+"sálat.no",
+"samnanger.no",
+"sande.more-og-romsdal.no",
+"sande.møre-og-romsdal.no",
+"sande.vestfold.no",
+"sandefjord.no",
+"sandnes.no",
+"sandoy.no",
+"sandøy.no",
+"sarpsborg.no",
+"sauda.no",
+"sauherad.no",
+"sel.no",
+"selbu.no",
+"selje.no",
+"seljord.no",
+"sigdal.no",
+"siljan.no",
+"sirdal.no",
+"skaun.no",
+"skedsmo.no",
+"ski.no",
+"skien.no",
+"skiptvet.no",
+"skjervoy.no",
+"skjervøy.no",
+"skierva.no",
+"skiervá.no",
+"skjak.no",
+"skjåk.no",
+"skodje.no",
+"skanland.no",
+"skånland.no",
+"skanit.no",
+"skánit.no",
+"smola.no",
+"smøla.no",
+"snillfjord.no",
+"snasa.no",
+"snåsa.no",
+"snoasa.no",
+"snaase.no",
+"snåase.no",
+"sogndal.no",
+"sokndal.no",
+"sola.no",
+"solund.no",
+"songdalen.no",
+"sortland.no",
+"spydeberg.no",
+"stange.no",
+"stavanger.no",
+"steigen.no",
+"steinkjer.no",
+"stjordal.no",
+"stjørdal.no",
+"stokke.no",
+"stor-elvdal.no",
+"stord.no",
+"stordal.no",
+"storfjord.no",
+"omasvuotna.no",
+"strand.no",
+"stranda.no",
+"stryn.no",
+"sula.no",
+"suldal.no",
+"sund.no",
+"sunndal.no",
+"surnadal.no",
+"sveio.no",
+"svelvik.no",
+"sykkylven.no",
+"sogne.no",
+"søgne.no",
+"somna.no",
+"sømna.no",
+"sondre-land.no",
+"søndre-land.no",
+"sor-aurdal.no",
+"sør-aurdal.no",
+"sor-fron.no",
+"sør-fron.no",
+"sor-odal.no",
+"sør-odal.no",
+"sor-varanger.no",
+"sør-varanger.no",
+"matta-varjjat.no",
+"mátta-várjjat.no",
+"sorfold.no",
+"sørfold.no",
+"sorreisa.no",
+"sørreisa.no",
+"sorum.no",
+"sørum.no",
+"tana.no",
+"deatnu.no",
+"time.no",
+"tingvoll.no",
+"tinn.no",
+"tjeldsund.no",
+"dielddanuorri.no",
+"tjome.no",
+"tjøme.no",
+"tokke.no",
+"tolga.no",
+"torsken.no",
+"tranoy.no",
+"tranøy.no",
+"tromso.no",
+"tromsø.no",
+"tromsa.no",
+"romsa.no",
+"trondheim.no",
+"troandin.no",
+"trysil.no",
+"trana.no",
+"træna.no",
+"trogstad.no",
+"trøgstad.no",
+"tvedestrand.no",
+"tydal.no",
+"tynset.no",
+"tysfjord.no",
+"divtasvuodna.no",
+"divttasvuotna.no",
+"tysnes.no",
+"tysvar.no",
+"tysvær.no",
+"tonsberg.no",
+"tønsberg.no",
+"ullensaker.no",
+"ullensvang.no",
+"ulvik.no",
+"utsira.no",
+"vadso.no",
+"vadsø.no",
+"cahcesuolo.no",
+"čáhcesuolo.no",
+"vaksdal.no",
+"valle.no",
+"vang.no",
+"vanylven.no",
+"vardo.no",
+"vardø.no",
+"varggat.no",
+"várggát.no",
+"vefsn.no",
+"vaapste.no",
+"vega.no",
+"vegarshei.no",
+"vegårshei.no",
+"vennesla.no",
+"verdal.no",
+"verran.no",
+"vestby.no",
+"vestnes.no",
+"vestre-slidre.no",
+"vestre-toten.no",
+"vestvagoy.no",
+"vestvågøy.no",
+"vevelstad.no",
+"vik.no",
+"vikna.no",
+"vindafjord.no",
+"volda.no",
+"voss.no",
+"varoy.no",
+"værøy.no",
+"vagan.no",
+"vågan.no",
+"voagat.no",
+"vagsoy.no",
+"vågsøy.no",
+"vaga.no",
+"vågå.no",
+"valer.ostfold.no",
+"våler.østfold.no",
+"valer.hedmark.no",
+"våler.hedmark.no",
+"*.np",
+"nr",
+"biz.nr",
+"info.nr",
+"gov.nr",
+"edu.nr",
+"org.nr",
+"net.nr",
+"com.nr",
+"nu",
+"nz",
+"ac.nz",
+"co.nz",
+"cri.nz",
+"geek.nz",
+"gen.nz",
+"govt.nz",
+"health.nz",
+"iwi.nz",
+"kiwi.nz",
+"maori.nz",
+"mil.nz",
+"māori.nz",
+"net.nz",
+"org.nz",
+"parliament.nz",
+"school.nz",
+"om",
+"co.om",
+"com.om",
+"edu.om",
+"gov.om",
+"med.om",
+"museum.om",
+"net.om",
+"org.om",
+"pro.om",
+"onion",
+"org",
+"pa",
+"ac.pa",
+"gob.pa",
+"com.pa",
+"org.pa",
+"sld.pa",
+"edu.pa",
+"net.pa",
+"ing.pa",
+"abo.pa",
+"med.pa",
+"nom.pa",
+"pe",
+"edu.pe",
+"gob.pe",
+"nom.pe",
+"mil.pe",
+"org.pe",
+"com.pe",
+"net.pe",
+"pf",
+"com.pf",
+"org.pf",
+"edu.pf",
+"*.pg",
+"ph",
+"com.ph",
+"net.ph",
+"org.ph",
+"gov.ph",
+"edu.ph",
+"ngo.ph",
+"mil.ph",
+"i.ph",
+"pk",
+"com.pk",
+"net.pk",
+"edu.pk",
+"org.pk",
+"fam.pk",
+"biz.pk",
+"web.pk",
+"gov.pk",
+"gob.pk",
+"gok.pk",
+"gon.pk",
+"gop.pk",
+"gos.pk",
+"info.pk",
+"pl",
+"com.pl",
+"net.pl",
+"org.pl",
+"aid.pl",
+"agro.pl",
+"atm.pl",
+"auto.pl",
+"biz.pl",
+"edu.pl",
+"gmina.pl",
+"gsm.pl",
+"info.pl",
+"mail.pl",
+"miasta.pl",
+"media.pl",
+"mil.pl",
+"nieruchomosci.pl",
+"nom.pl",
+"pc.pl",
+"powiat.pl",
+"priv.pl",
+"realestate.pl",
+"rel.pl",
+"sex.pl",
+"shop.pl",
+"sklep.pl",
+"sos.pl",
+"szkola.pl",
+"targi.pl",
+"tm.pl",
+"tourism.pl",
+"travel.pl",
+"turystyka.pl",
+"gov.pl",
+"ap.gov.pl",
+"ic.gov.pl",
+"is.gov.pl",
+"us.gov.pl",
+"kmpsp.gov.pl",
+"kppsp.gov.pl",
+"kwpsp.gov.pl",
+"psp.gov.pl",
+"wskr.gov.pl",
+"kwp.gov.pl",
+"mw.gov.pl",
+"ug.gov.pl",
+"um.gov.pl",
+"umig.gov.pl",
+"ugim.gov.pl",
+"upow.gov.pl",
+"uw.gov.pl",
+"starostwo.gov.pl",
+"pa.gov.pl",
+"po.gov.pl",
+"psse.gov.pl",
+"pup.gov.pl",
+"rzgw.gov.pl",
+"sa.gov.pl",
+"so.gov.pl",
+"sr.gov.pl",
+"wsa.gov.pl",
+"sko.gov.pl",
+"uzs.gov.pl",
+"wiih.gov.pl",
+"winb.gov.pl",
+"pinb.gov.pl",
+"wios.gov.pl",
+"witd.gov.pl",
+"wzmiuw.gov.pl",
+"piw.gov.pl",
+"wiw.gov.pl",
+"griw.gov.pl",
+"wif.gov.pl",
+"oum.gov.pl",
+"sdn.gov.pl",
+"zp.gov.pl",
+"uppo.gov.pl",
+"mup.gov.pl",
+"wuoz.gov.pl",
+"konsulat.gov.pl",
+"oirm.gov.pl",
+"augustow.pl",
+"babia-gora.pl",
+"bedzin.pl",
+"beskidy.pl",
+"bialowieza.pl",
+"bialystok.pl",
+"bielawa.pl",
+"bieszczady.pl",
+"boleslawiec.pl",
+"bydgoszcz.pl",
+"bytom.pl",
+"cieszyn.pl",
+"czeladz.pl",
+"czest.pl",
+"dlugoleka.pl",
+"elblag.pl",
+"elk.pl",
+"glogow.pl",
+"gniezno.pl",
+"gorlice.pl",
+"grajewo.pl",
+"ilawa.pl",
+"jaworzno.pl",
+"jelenia-gora.pl",
+"jgora.pl",
+"kalisz.pl",
+"kazimierz-dolny.pl",
+"karpacz.pl",
+"kartuzy.pl",
+"kaszuby.pl",
+"katowice.pl",
+"kepno.pl",
+"ketrzyn.pl",
+"klodzko.pl",
+"kobierzyce.pl",
+"kolobrzeg.pl",
+"konin.pl",
+"konskowola.pl",
+"kutno.pl",
+"lapy.pl",
+"lebork.pl",
+"legnica.pl",
+"lezajsk.pl",
+"limanowa.pl",
+"lomza.pl",
+"lowicz.pl",
+"lubin.pl",
+"lukow.pl",
+"malbork.pl",
+"malopolska.pl",
+"mazowsze.pl",
+"mazury.pl",
+"mielec.pl",
+"mielno.pl",
+"mragowo.pl",
+"naklo.pl",
+"nowaruda.pl",
+"nysa.pl",
+"olawa.pl",
+"olecko.pl",
+"olkusz.pl",
+"olsztyn.pl",
+"opoczno.pl",
+"opole.pl",
+"ostroda.pl",
+"ostroleka.pl",
+"ostrowiec.pl",
+"ostrowwlkp.pl",
+"pila.pl",
+"pisz.pl",
+"podhale.pl",
+"podlasie.pl",
+"polkowice.pl",
+"pomorze.pl",
+"pomorskie.pl",
+"prochowice.pl",
+"pruszkow.pl",
+"przeworsk.pl",
+"pulawy.pl",
+"radom.pl",
+"rawa-maz.pl",
+"rybnik.pl",
+"rzeszow.pl",
+"sanok.pl",
+"sejny.pl",
+"slask.pl",
+"slupsk.pl",
+"sosnowiec.pl",
+"stalowa-wola.pl",
+"skoczow.pl",
+"starachowice.pl",
+"stargard.pl",
+"suwalki.pl",
+"swidnica.pl",
+"swiebodzin.pl",
+"swinoujscie.pl",
+"szczecin.pl",
+"szczytno.pl",
+"tarnobrzeg.pl",
+"tgory.pl",
+"turek.pl",
+"tychy.pl",
+"ustka.pl",
+"walbrzych.pl",
+"warmia.pl",
+"warszawa.pl",
+"waw.pl",
+"wegrow.pl",
+"wielun.pl",
+"wlocl.pl",
+"wloclawek.pl",
+"wodzislaw.pl",
+"wolomin.pl",
+"wroclaw.pl",
+"zachpomor.pl",
+"zagan.pl",
+"zarow.pl",
+"zgora.pl",
+"zgorzelec.pl",
+"pm",
+"pn",
+"gov.pn",
+"co.pn",
+"org.pn",
+"edu.pn",
+"net.pn",
+"post",
+"pr",
+"com.pr",
+"net.pr",
+"org.pr",
+"gov.pr",
+"edu.pr",
+"isla.pr",
+"pro.pr",
+"biz.pr",
+"info.pr",
+"name.pr",
+"est.pr",
+"prof.pr",
+"ac.pr",
+"pro",
+"aaa.pro",
+"aca.pro",
+"acct.pro",
+"avocat.pro",
+"bar.pro",
+"cpa.pro",
+"eng.pro",
+"jur.pro",
+"law.pro",
+"med.pro",
+"recht.pro",
+"ps",
+"edu.ps",
+"gov.ps",
+"sec.ps",
+"plo.ps",
+"com.ps",
+"org.ps",
+"net.ps",
+"pt",
+"net.pt",
+"gov.pt",
+"org.pt",
+"edu.pt",
+"int.pt",
+"publ.pt",
+"com.pt",
+"nome.pt",
+"pw",
+"co.pw",
+"ne.pw",
+"or.pw",
+"ed.pw",
+"go.pw",
+"belau.pw",
+"py",
+"com.py",
+"coop.py",
+"edu.py",
+"gov.py",
+"mil.py",
+"net.py",
+"org.py",
+"qa",
+"com.qa",
+"edu.qa",
+"gov.qa",
+"mil.qa",
+"name.qa",
+"net.qa",
+"org.qa",
+"sch.qa",
+"re",
+"asso.re",
+"com.re",
+"nom.re",
+"ro",
+"arts.ro",
+"com.ro",
+"firm.ro",
+"info.ro",
+"nom.ro",
+"nt.ro",
+"org.ro",
+"rec.ro",
+"store.ro",
+"tm.ro",
+"www.ro",
+"rs",
+"ac.rs",
+"co.rs",
+"edu.rs",
+"gov.rs",
+"in.rs",
+"org.rs",
+"ru",
+"rw",
+"ac.rw",
+"co.rw",
+"coop.rw",
+"gov.rw",
+"mil.rw",
+"net.rw",
+"org.rw",
+"sa",
+"com.sa",
+"net.sa",
+"org.sa",
+"gov.sa",
+"med.sa",
+"pub.sa",
+"edu.sa",
+"sch.sa",
+"sb",
+"com.sb",
+"edu.sb",
+"gov.sb",
+"net.sb",
+"org.sb",
+"sc",
+"com.sc",
+"gov.sc",
+"net.sc",
+"org.sc",
+"edu.sc",
+"sd",
+"com.sd",
+"net.sd",
+"org.sd",
+"edu.sd",
+"med.sd",
+"tv.sd",
+"gov.sd",
+"info.sd",
+"se",
+"a.se",
+"ac.se",
+"b.se",
+"bd.se",
+"brand.se",
+"c.se",
+"d.se",
+"e.se",
+"f.se",
+"fh.se",
+"fhsk.se",
+"fhv.se",
+"g.se",
+"h.se",
+"i.se",
+"k.se",
+"komforb.se",
+"kommunalforbund.se",
+"komvux.se",
+"l.se",
+"lanbib.se",
+"m.se",
+"n.se",
+"naturbruksgymn.se",
+"o.se",
+"org.se",
+"p.se",
+"parti.se",
+"pp.se",
+"press.se",
+"r.se",
+"s.se",
+"t.se",
+"tm.se",
+"u.se",
+"w.se",
+"x.se",
+"y.se",
+"z.se",
+"sg",
+"com.sg",
+"net.sg",
+"org.sg",
+"gov.sg",
+"edu.sg",
+"per.sg",
+"sh",
+"com.sh",
+"net.sh",
+"gov.sh",
+"org.sh",
+"mil.sh",
+"si",
+"sj",
+"sk",
+"sl",
+"com.sl",
+"net.sl",
+"edu.sl",
+"gov.sl",
+"org.sl",
+"sm",
+"sn",
+"art.sn",
+"com.sn",
+"edu.sn",
+"gouv.sn",
+"org.sn",
+"perso.sn",
+"univ.sn",
+"so",
+"com.so",
+"edu.so",
+"gov.so",
+"me.so",
+"net.so",
+"org.so",
+"sr",
+"ss",
+"biz.ss",
+"com.ss",
+"edu.ss",
+"gov.ss",
+"net.ss",
+"org.ss",
+"st",
+"co.st",
+"com.st",
+"consulado.st",
+"edu.st",
+"embaixada.st",
+"gov.st",
+"mil.st",
+"net.st",
+"org.st",
+"principe.st",
+"saotome.st",
+"store.st",
+"su",
+"sv",
+"com.sv",
+"edu.sv",
+"gob.sv",
+"org.sv",
+"red.sv",
+"sx",
+"gov.sx",
+"sy",
+"edu.sy",
+"gov.sy",
+"net.sy",
+"mil.sy",
+"com.sy",
+"org.sy",
+"sz",
+"co.sz",
+"ac.sz",
+"org.sz",
+"tc",
+"td",
+"tel",
+"tf",
+"tg",
+"th",
+"ac.th",
+"co.th",
+"go.th",
+"in.th",
+"mi.th",
+"net.th",
+"or.th",
+"tj",
+"ac.tj",
+"biz.tj",
+"co.tj",
+"com.tj",
+"edu.tj",
+"go.tj",
+"gov.tj",
+"int.tj",
+"mil.tj",
+"name.tj",
+"net.tj",
+"nic.tj",
+"org.tj",
+"test.tj",
+"web.tj",
+"tk",
+"tl",
+"gov.tl",
+"tm",
+"com.tm",
+"co.tm",
+"org.tm",
+"net.tm",
+"nom.tm",
+"gov.tm",
+"mil.tm",
+"edu.tm",
+"tn",
+"com.tn",
+"ens.tn",
+"fin.tn",
+"gov.tn",
+"ind.tn",
+"intl.tn",
+"nat.tn",
+"net.tn",
+"org.tn",
+"info.tn",
+"perso.tn",
+"tourism.tn",
+"edunet.tn",
+"rnrt.tn",
+"rns.tn",
+"rnu.tn",
+"mincom.tn",
+"agrinet.tn",
+"defense.tn",
+"turen.tn",
+"to",
+"com.to",
+"gov.to",
+"net.to",
+"org.to",
+"edu.to",
+"mil.to",
+"tr",
+"av.tr",
+"bbs.tr",
+"bel.tr",
+"biz.tr",
+"com.tr",
+"dr.tr",
+"edu.tr",
+"gen.tr",
+"gov.tr",
+"info.tr",
+"mil.tr",
+"k12.tr",
+"kep.tr",
+"name.tr",
+"net.tr",
+"org.tr",
+"pol.tr",
+"tel.tr",
+"tsk.tr",
+"tv.tr",
+"web.tr",
+"nc.tr",
+"gov.nc.tr",
+"tt",
+"co.tt",
+"com.tt",
+"org.tt",
+"net.tt",
+"biz.tt",
+"info.tt",
+"pro.tt",
+"int.tt",
+"coop.tt",
+"jobs.tt",
+"mobi.tt",
+"travel.tt",
+"museum.tt",
+"aero.tt",
+"name.tt",
+"gov.tt",
+"edu.tt",
+"tv",
+"tw",
+"edu.tw",
+"gov.tw",
+"mil.tw",
+"com.tw",
+"net.tw",
+"org.tw",
+"idv.tw",
+"game.tw",
+"ebiz.tw",
+"club.tw",
+"網路.tw",
+"組織.tw",
+"商業.tw",
+"tz",
+"ac.tz",
+"co.tz",
+"go.tz",
+"hotel.tz",
+"info.tz",
+"me.tz",
+"mil.tz",
+"mobi.tz",
+"ne.tz",
+"or.tz",
+"sc.tz",
+"tv.tz",
+"ua",
+"com.ua",
+"edu.ua",
+"gov.ua",
+"in.ua",
+"net.ua",
+"org.ua",
+"cherkassy.ua",
+"cherkasy.ua",
+"chernigov.ua",
+"chernihiv.ua",
+"chernivtsi.ua",
+"chernovtsy.ua",
+"ck.ua",
+"cn.ua",
+"cr.ua",
+"crimea.ua",
+"cv.ua",
+"dn.ua",
+"dnepropetrovsk.ua",
+"dnipropetrovsk.ua",
+"dominic.ua",
+"donetsk.ua",
+"dp.ua",
+"if.ua",
+"ivano-frankivsk.ua",
+"kh.ua",
+"kharkiv.ua",
+"kharkov.ua",
+"kherson.ua",
+"khmelnitskiy.ua",
+"khmelnytskyi.ua",
+"kiev.ua",
+"kirovograd.ua",
+"km.ua",
+"kr.ua",
+"krym.ua",
+"ks.ua",
+"kv.ua",
+"kyiv.ua",
+"lg.ua",
+"lt.ua",
+"lugansk.ua",
+"lutsk.ua",
+"lv.ua",
+"lviv.ua",
+"mk.ua",
+"mykolaiv.ua",
+"nikolaev.ua",
+"od.ua",
+"odesa.ua",
+"odessa.ua",
+"pl.ua",
+"poltava.ua",
+"rivne.ua",
+"rovno.ua",
+"rv.ua",
+"sb.ua",
+"sebastopol.ua",
+"sevastopol.ua",
+"sm.ua",
+"sumy.ua",
+"te.ua",
+"ternopil.ua",
+"uz.ua",
+"uzhgorod.ua",
+"vinnica.ua",
+"vinnytsia.ua",
+"vn.ua",
+"volyn.ua",
+"yalta.ua",
+"zaporizhzhe.ua",
+"zaporizhzhia.ua",
+"zhitomir.ua",
+"zhytomyr.ua",
+"zp.ua",
+"zt.ua",
+"ug",
+"co.ug",
+"or.ug",
+"ac.ug",
+"sc.ug",
+"go.ug",
+"ne.ug",
+"com.ug",
+"org.ug",
+"uk",
+"ac.uk",
+"co.uk",
+"gov.uk",
+"ltd.uk",
+"me.uk",
+"net.uk",
+"nhs.uk",
+"org.uk",
+"plc.uk",
+"police.uk",
+"*.sch.uk",
+"us",
+"dni.us",
+"fed.us",
+"isa.us",
+"kids.us",
+"nsn.us",
+"ak.us",
+"al.us",
+"ar.us",
+"as.us",
+"az.us",
+"ca.us",
+"co.us",
+"ct.us",
+"dc.us",
+"de.us",
+"fl.us",
+"ga.us",
+"gu.us",
+"hi.us",
+"ia.us",
+"id.us",
+"il.us",
+"in.us",
+"ks.us",
+"ky.us",
+"la.us",
+"ma.us",
+"md.us",
+"me.us",
+"mi.us",
+"mn.us",
+"mo.us",
+"ms.us",
+"mt.us",
+"nc.us",
+"nd.us",
+"ne.us",
+"nh.us",
+"nj.us",
+"nm.us",
+"nv.us",
+"ny.us",
+"oh.us",
+"ok.us",
+"or.us",
+"pa.us",
+"pr.us",
+"ri.us",
+"sc.us",
+"sd.us",
+"tn.us",
+"tx.us",
+"ut.us",
+"vi.us",
+"vt.us",
+"va.us",
+"wa.us",
+"wi.us",
+"wv.us",
+"wy.us",
+"k12.ak.us",
+"k12.al.us",
+"k12.ar.us",
+"k12.as.us",
+"k12.az.us",
+"k12.ca.us",
+"k12.co.us",
+"k12.ct.us",
+"k12.dc.us",
+"k12.de.us",
+"k12.fl.us",
+"k12.ga.us",
+"k12.gu.us",
+"k12.ia.us",
+"k12.id.us",
+"k12.il.us",
+"k12.in.us",
+"k12.ks.us",
+"k12.ky.us",
+"k12.la.us",
+"k12.ma.us",
+"k12.md.us",
+"k12.me.us",
+"k12.mi.us",
+"k12.mn.us",
+"k12.mo.us",
+"k12.ms.us",
+"k12.mt.us",
+"k12.nc.us",
+"k12.ne.us",
+"k12.nh.us",
+"k12.nj.us",
+"k12.nm.us",
+"k12.nv.us",
+"k12.ny.us",
+"k12.oh.us",
+"k12.ok.us",
+"k12.or.us",
+"k12.pa.us",
+"k12.pr.us",
+"k12.ri.us",
+"k12.sc.us",
+"k12.tn.us",
+"k12.tx.us",
+"k12.ut.us",
+"k12.vi.us",
+"k12.vt.us",
+"k12.va.us",
+"k12.wa.us",
+"k12.wi.us",
+"k12.wy.us",
+"cc.ak.us",
+"cc.al.us",
+"cc.ar.us",
+"cc.as.us",
+"cc.az.us",
+"cc.ca.us",
+"cc.co.us",
+"cc.ct.us",
+"cc.dc.us",
+"cc.de.us",
+"cc.fl.us",
+"cc.ga.us",
+"cc.gu.us",
+"cc.hi.us",
+"cc.ia.us",
+"cc.id.us",
+"cc.il.us",
+"cc.in.us",
+"cc.ks.us",
+"cc.ky.us",
+"cc.la.us",
+"cc.ma.us",
+"cc.md.us",
+"cc.me.us",
+"cc.mi.us",
+"cc.mn.us",
+"cc.mo.us",
+"cc.ms.us",
+"cc.mt.us",
+"cc.nc.us",
+"cc.nd.us",
+"cc.ne.us",
+"cc.nh.us",
+"cc.nj.us",
+"cc.nm.us",
+"cc.nv.us",
+"cc.ny.us",
+"cc.oh.us",
+"cc.ok.us",
+"cc.or.us",
+"cc.pa.us",
+"cc.pr.us",
+"cc.ri.us",
+"cc.sc.us",
+"cc.sd.us",
+"cc.tn.us",
+"cc.tx.us",
+"cc.ut.us",
+"cc.vi.us",
+"cc.vt.us",
+"cc.va.us",
+"cc.wa.us",
+"cc.wi.us",
+"cc.wv.us",
+"cc.wy.us",
+"lib.ak.us",
+"lib.al.us",
+"lib.ar.us",
+"lib.as.us",
+"lib.az.us",
+"lib.ca.us",
+"lib.co.us",
+"lib.ct.us",
+"lib.dc.us",
+"lib.fl.us",
+"lib.ga.us",
+"lib.gu.us",
+"lib.hi.us",
+"lib.ia.us",
+"lib.id.us",
+"lib.il.us",
+"lib.in.us",
+"lib.ks.us",
+"lib.ky.us",
+"lib.la.us",
+"lib.ma.us",
+"lib.md.us",
+"lib.me.us",
+"lib.mi.us",
+"lib.mn.us",
+"lib.mo.us",
+"lib.ms.us",
+"lib.mt.us",
+"lib.nc.us",
+"lib.nd.us",
+"lib.ne.us",
+"lib.nh.us",
+"lib.nj.us",
+"lib.nm.us",
+"lib.nv.us",
+"lib.ny.us",
+"lib.oh.us",
+"lib.ok.us",
+"lib.or.us",
+"lib.pa.us",
+"lib.pr.us",
+"lib.ri.us",
+"lib.sc.us",
+"lib.sd.us",
+"lib.tn.us",
+"lib.tx.us",
+"lib.ut.us",
+"lib.vi.us",
+"lib.vt.us",
+"lib.va.us",
+"lib.wa.us",
+"lib.wi.us",
+"lib.wy.us",
+"pvt.k12.ma.us",
+"chtr.k12.ma.us",
+"paroch.k12.ma.us",
+"ann-arbor.mi.us",
+"cog.mi.us",
+"dst.mi.us",
+"eaton.mi.us",
+"gen.mi.us",
+"mus.mi.us",
+"tec.mi.us",
+"washtenaw.mi.us",
+"uy",
+"com.uy",
+"edu.uy",
+"gub.uy",
+"mil.uy",
+"net.uy",
+"org.uy",
+"uz",
+"co.uz",
+"com.uz",
+"net.uz",
+"org.uz",
+"va",
+"vc",
+"com.vc",
+"net.vc",
+"org.vc",
+"gov.vc",
+"mil.vc",
+"edu.vc",
+"ve",
+"arts.ve",
+"co.ve",
+"com.ve",
+"e12.ve",
+"edu.ve",
+"firm.ve",
+"gob.ve",
+"gov.ve",
+"info.ve",
+"int.ve",
+"mil.ve",
+"net.ve",
+"org.ve",
+"rec.ve",
+"store.ve",
+"tec.ve",
+"web.ve",
+"vg",
+"vi",
+"co.vi",
+"com.vi",
+"k12.vi",
+"net.vi",
+"org.vi",
+"vn",
+"com.vn",
+"net.vn",
+"org.vn",
+"edu.vn",
+"gov.vn",
+"int.vn",
+"ac.vn",
+"biz.vn",
+"info.vn",
+"name.vn",
+"pro.vn",
+"health.vn",
+"vu",
+"com.vu",
+"edu.vu",
+"net.vu",
+"org.vu",
+"wf",
+"ws",
+"com.ws",
+"net.ws",
+"org.ws",
+"gov.ws",
+"edu.ws",
+"yt",
+"امارات",
+"հայ",
+"বাংলা",
+"бг",
+"бел",
+"中国",
+"中國",
+"الجزائر",
+"مصر",
+"ею",
+"ευ",
+"موريتانيا",
+"გე",
+"ελ",
+"香港",
+"公司.香港",
+"教育.香港",
+"政府.香港",
+"個人.香港",
+"網絡.香港",
+"組織.香港",
+"ಭಾರತ",
+"ଭାରତ",
+"ভাৰত",
+"भारतम्",
+"भारोत",
+"ڀارت",
+"ഭാരതം",
+"भारत",
+"بارت",
+"بھارت",
+"భారత్",
+"ભારત",
+"ਭਾਰਤ",
+"ভারত",
+"இந்தியா",
+"ایران",
+"ايران",
+"عراق",
+"الاردن",
+"한국",
+"қаз",
+"ලංකා",
+"இலங்கை",
+"المغرب",
+"мкд",
+"мон",
+"澳門",
+"澳门",
+"مليسيا",
+"عمان",
+"پاکستان",
+"پاكستان",
+"فلسطين",
+"срб",
+"пр.срб",
+"орг.срб",
+"обр.срб",
+"од.срб",
+"упр.срб",
+"ак.срб",
+"рф",
+"قطر",
+"السعودية",
+"السعودیة",
+"السعودیۃ",
+"السعوديه",
+"سودان",
+"新加坡",
+"சிங்கப்பூர்",
+"سورية",
+"سوريا",
+"ไทย",
+"ศึกษา.ไทย",
+"ธุรกิจ.ไทย",
+"รัฐบาล.ไทย",
+"ทหาร.ไทย",
+"เน็ต.ไทย",
+"องค์กร.ไทย",
+"تونس",
+"台灣",
+"台湾",
+"臺灣",
+"укр",
+"اليمن",
+"xxx",
+"*.ye",
+"ac.za",
+"agric.za",
+"alt.za",
+"co.za",
+"edu.za",
+"gov.za",
+"grondar.za",
+"law.za",
+"mil.za",
+"net.za",
+"ngo.za",
+"nic.za",
+"nis.za",
+"nom.za",
+"org.za",
+"school.za",
+"tm.za",
+"web.za",
+"zm",
+"ac.zm",
+"biz.zm",
+"co.zm",
+"com.zm",
+"edu.zm",
+"gov.zm",
+"info.zm",
+"mil.zm",
+"net.zm",
+"org.zm",
+"sch.zm",
+"zw",
+"ac.zw",
+"co.zw",
+"gov.zw",
+"mil.zw",
+"org.zw",
+"aaa",
+"aarp",
+"abarth",
+"abb",
+"abbott",
+"abbvie",
+"abc",
+"able",
+"abogado",
+"abudhabi",
+"academy",
+"accenture",
+"accountant",
+"accountants",
+"aco",
+"actor",
+"adac",
+"ads",
+"adult",
+"aeg",
+"aetna",
+"afamilycompany",
+"afl",
+"africa",
+"agakhan",
+"agency",
+"aig",
+"aigo",
+"airbus",
+"airforce",
+"airtel",
+"akdn",
+"alfaromeo",
+"alibaba",
+"alipay",
+"allfinanz",
+"allstate",
+"ally",
+"alsace",
+"alstom",
+"amazon",
+"americanexpress",
+"americanfamily",
+"amex",
+"amfam",
+"amica",
+"amsterdam",
+"analytics",
+"android",
+"anquan",
+"anz",
+"aol",
+"apartments",
+"app",
+"apple",
+"aquarelle",
+"arab",
+"aramco",
+"archi",
+"army",
+"art",
+"arte",
+"asda",
+"associates",
+"athleta",
+"attorney",
+"auction",
+"audi",
+"audible",
+"audio",
+"auspost",
+"author",
+"auto",
+"autos",
+"avianca",
+"aws",
+"axa",
+"azure",
+"baby",
+"baidu",
+"banamex",
+"bananarepublic",
+"band",
+"bank",
+"bar",
+"barcelona",
+"barclaycard",
+"barclays",
+"barefoot",
+"bargains",
+"baseball",
+"basketball",
+"bauhaus",
+"bayern",
+"bbc",
+"bbt",
+"bbva",
+"bcg",
+"bcn",
+"beats",
+"beauty",
+"beer",
+"bentley",
+"berlin",
+"best",
+"bestbuy",
+"bet",
+"bharti",
+"bible",
+"bid",
+"bike",
+"bing",
+"bingo",
+"bio",
+"black",
+"blackfriday",
+"blockbuster",
+"blog",
+"bloomberg",
+"blue",
+"bms",
+"bmw",
+"bnpparibas",
+"boats",
+"boehringer",
+"bofa",
+"bom",
+"bond",
+"boo",
+"book",
+"booking",
+"bosch",
+"bostik",
+"boston",
+"bot",
+"boutique",
+"box",
+"bradesco",
+"bridgestone",
+"broadway",
+"broker",
+"brother",
+"brussels",
+"budapest",
+"bugatti",
+"build",
+"builders",
+"business",
+"buy",
+"buzz",
+"bzh",
+"cab",
+"cafe",
+"cal",
+"call",
+"calvinklein",
+"cam",
+"camera",
+"camp",
+"cancerresearch",
+"canon",
+"capetown",
+"capital",
+"capitalone",
+"car",
+"caravan",
+"cards",
+"care",
+"career",
+"careers",
+"cars",
+"casa",
+"case",
+"caseih",
+"cash",
+"casino",
+"catering",
+"catholic",
+"cba",
+"cbn",
+"cbre",
+"cbs",
+"ceb",
+"center",
+"ceo",
+"cern",
+"cfa",
+"cfd",
+"chanel",
+"channel",
+"charity",
+"chase",
+"chat",
+"cheap",
+"chintai",
+"christmas",
+"chrome",
+"church",
+"cipriani",
+"circle",
+"cisco",
+"citadel",
+"citi",
+"citic",
+"city",
+"cityeats",
+"claims",
+"cleaning",
+"click",
+"clinic",
+"clinique",
+"clothing",
+"cloud",
+"club",
+"clubmed",
+"coach",
+"codes",
+"coffee",
+"college",
+"cologne",
+"comcast",
+"commbank",
+"community",
+"company",
+"compare",
+"computer",
+"comsec",
+"condos",
+"construction",
+"consulting",
+"contact",
+"contractors",
+"cooking",
+"cookingchannel",
+"cool",
+"corsica",
+"country",
+"coupon",
+"coupons",
+"courses",
+"cpa",
+"credit",
+"creditcard",
+"creditunion",
+"cricket",
+"crown",
+"crs",
+"cruise",
+"cruises",
+"csc",
+"cuisinella",
+"cymru",
+"cyou",
+"dabur",
+"dad",
+"dance",
+"data",
+"date",
+"dating",
+"datsun",
+"day",
+"dclk",
+"dds",
+"deal",
+"dealer",
+"deals",
+"degree",
+"delivery",
+"dell",
+"deloitte",
+"delta",
+"democrat",
+"dental",
+"dentist",
+"desi",
+"design",
+"dev",
+"dhl",
+"diamonds",
+"diet",
+"digital",
+"direct",
+"directory",
+"discount",
+"discover",
+"dish",
+"diy",
+"dnp",
+"docs",
+"doctor",
+"dog",
+"domains",
+"dot",
+"download",
+"drive",
+"dtv",
+"dubai",
+"duck",
+"dunlop",
+"dupont",
+"durban",
+"dvag",
+"dvr",
+"earth",
+"eat",
+"eco",
+"edeka",
+"education",
+"email",
+"emerck",
+"energy",
+"engineer",
+"engineering",
+"enterprises",
+"epson",
+"equipment",
+"ericsson",
+"erni",
+"esq",
+"estate",
+"esurance",
+"etisalat",
+"eurovision",
+"eus",
+"events",
+"exchange",
+"expert",
+"exposed",
+"express",
+"extraspace",
+"fage",
+"fail",
+"fairwinds",
+"faith",
+"family",
+"fan",
+"fans",
+"farm",
+"farmers",
+"fashion",
+"fast",
+"fedex",
+"feedback",
+"ferrari",
+"ferrero",
+"fiat",
+"fidelity",
+"fido",
+"film",
+"final",
+"finance",
+"financial",
+"fire",
+"firestone",
+"firmdale",
+"fish",
+"fishing",
+"fit",
+"fitness",
+"flickr",
+"flights",
+"flir",
+"florist",
+"flowers",
+"fly",
+"foo",
+"food",
+"foodnetwork",
+"football",
+"ford",
+"forex",
+"forsale",
+"forum",
+"foundation",
+"fox",
+"free",
+"fresenius",
+"frl",
+"frogans",
+"frontdoor",
+"frontier",
+"ftr",
+"fujitsu",
+"fujixerox",
+"fun",
+"fund",
+"furniture",
+"futbol",
+"fyi",
+"gal",
+"gallery",
+"gallo",
+"gallup",
+"game",
+"games",
+"gap",
+"garden",
+"gay",
+"gbiz",
+"gdn",
+"gea",
+"gent",
+"genting",
+"george",
+"ggee",
+"gift",
+"gifts",
+"gives",
+"giving",
+"glade",
+"glass",
+"gle",
+"global",
+"globo",
+"gmail",
+"gmbh",
+"gmo",
+"gmx",
+"godaddy",
+"gold",
+"goldpoint",
+"golf",
+"goo",
+"goodyear",
+"goog",
+"google",
+"gop",
+"got",
+"grainger",
+"graphics",
+"gratis",
+"green",
+"gripe",
+"grocery",
+"group",
+"guardian",
+"gucci",
+"guge",
+"guide",
+"guitars",
+"guru",
+"hair",
+"hamburg",
+"hangout",
+"haus",
+"hbo",
+"hdfc",
+"hdfcbank",
+"health",
+"healthcare",
+"help",
+"helsinki",
+"here",
+"hermes",
+"hgtv",
+"hiphop",
+"hisamitsu",
+"hitachi",
+"hiv",
+"hkt",
+"hockey",
+"holdings",
+"holiday",
+"homedepot",
+"homegoods",
+"homes",
+"homesense",
+"honda",
+"horse",
+"hospital",
+"host",
+"hosting",
+"hot",
+"hoteles",
+"hotels",
+"hotmail",
+"house",
+"how",
+"hsbc",
+"hughes",
+"hyatt",
+"hyundai",
+"ibm",
+"icbc",
+"ice",
+"icu",
+"ieee",
+"ifm",
+"ikano",
+"imamat",
+"imdb",
+"immo",
+"immobilien",
+"inc",
+"industries",
+"infiniti",
+"ing",
+"ink",
+"institute",
+"insurance",
+"insure",
+"intel",
+"international",
+"intuit",
+"investments",
+"ipiranga",
+"irish",
+"ismaili",
+"ist",
+"istanbul",
+"itau",
+"itv",
+"iveco",
+"jaguar",
+"java",
+"jcb",
+"jcp",
+"jeep",
+"jetzt",
+"jewelry",
+"jio",
+"jll",
+"jmp",
+"jnj",
+"joburg",
+"jot",
+"joy",
+"jpmorgan",
+"jprs",
+"juegos",
+"juniper",
+"kaufen",
+"kddi",
+"kerryhotels",
+"kerrylogistics",
+"kerryproperties",
+"kfh",
+"kia",
+"kim",
+"kinder",
+"kindle",
+"kitchen",
+"kiwi",
+"koeln",
+"komatsu",
+"kosher",
+"kpmg",
+"kpn",
+"krd",
+"kred",
+"kuokgroup",
+"kyoto",
+"lacaixa",
+"lamborghini",
+"lamer",
+"lancaster",
+"lancia",
+"land",
+"landrover",
+"lanxess",
+"lasalle",
+"lat",
+"latino",
+"latrobe",
+"law",
+"lawyer",
+"lds",
+"lease",
+"leclerc",
+"lefrak",
+"legal",
+"lego",
+"lexus",
+"lgbt",
+"lidl",
+"life",
+"lifeinsurance",
+"lifestyle",
+"lighting",
+"like",
+"lilly",
+"limited",
+"limo",
+"lincoln",
+"linde",
+"link",
+"lipsy",
+"live",
+"living",
+"lixil",
+"llc",
+"llp",
+"loan",
+"loans",
+"locker",
+"locus",
+"loft",
+"lol",
+"london",
+"lotte",
+"lotto",
+"love",
+"lpl",
+"lplfinancial",
+"ltd",
+"ltda",
+"lundbeck",
+"lupin",
+"luxe",
+"luxury",
+"macys",
+"madrid",
+"maif",
+"maison",
+"makeup",
+"man",
+"management",
+"mango",
+"map",
+"market",
+"marketing",
+"markets",
+"marriott",
+"marshalls",
+"maserati",
+"mattel",
+"mba",
+"mckinsey",
+"med",
+"media",
+"meet",
+"melbourne",
+"meme",
+"memorial",
+"men",
+"menu",
+"merckmsd",
+"metlife",
+"miami",
+"microsoft",
+"mini",
+"mint",
+"mit",
+"mitsubishi",
+"mlb",
+"mls",
+"mma",
+"mobile",
+"moda",
+"moe",
+"moi",
+"mom",
+"monash",
+"money",
+"monster",
+"mormon",
+"mortgage",
+"moscow",
+"moto",
+"motorcycles",
+"mov",
+"movie",
+"msd",
+"mtn",
+"mtr",
+"mutual",
+"nab",
+"nadex",
+"nagoya",
+"nationwide",
+"natura",
+"navy",
+"nba",
+"nec",
+"netbank",
+"netflix",
+"network",
+"neustar",
+"new",
+"newholland",
+"news",
+"next",
+"nextdirect",
+"nexus",
+"nfl",
+"ngo",
+"nhk",
+"nico",
+"nike",
+"nikon",
+"ninja",
+"nissan",
+"nissay",
+"nokia",
+"northwesternmutual",
+"norton",
+"now",
+"nowruz",
+"nowtv",
+"nra",
+"nrw",
+"ntt",
+"nyc",
+"obi",
+"observer",
+"off",
+"office",
+"okinawa",
+"olayan",
+"olayangroup",
+"oldnavy",
+"ollo",
+"omega",
+"one",
+"ong",
+"onl",
+"online",
+"onyourside",
+"ooo",
+"open",
+"oracle",
+"orange",
+"organic",
+"origins",
+"osaka",
+"otsuka",
+"ott",
+"ovh",
+"page",
+"panasonic",
+"paris",
+"pars",
+"partners",
+"parts",
+"party",
+"passagens",
+"pay",
+"pccw",
+"pet",
+"pfizer",
+"pharmacy",
+"phd",
+"philips",
+"phone",
+"photo",
+"photography",
+"photos",
+"physio",
+"pics",
+"pictet",
+"pictures",
+"pid",
+"pin",
+"ping",
+"pink",
+"pioneer",
+"pizza",
+"place",
+"play",
+"playstation",
+"plumbing",
+"plus",
+"pnc",
+"pohl",
+"poker",
+"politie",
+"porn",
+"pramerica",
+"praxi",
+"press",
+"prime",
+"prod",
+"productions",
+"prof",
+"progressive",
+"promo",
+"properties",
+"property",
+"protection",
+"pru",
+"prudential",
+"pub",
+"pwc",
+"qpon",
+"quebec",
+"quest",
+"qvc",
+"racing",
+"radio",
+"raid",
+"read",
+"realestate",
+"realtor",
+"realty",
+"recipes",
+"red",
+"redstone",
+"redumbrella",
+"rehab",
+"reise",
+"reisen",
+"reit",
+"reliance",
+"ren",
+"rent",
+"rentals",
+"repair",
+"report",
+"republican",
+"rest",
+"restaurant",
+"review",
+"reviews",
+"rexroth",
+"rich",
+"richardli",
+"ricoh",
+"rightathome",
+"ril",
+"rio",
+"rip",
+"rmit",
+"rocher",
+"rocks",
+"rodeo",
+"rogers",
+"room",
+"rsvp",
+"rugby",
+"ruhr",
+"run",
+"rwe",
+"ryukyu",
+"saarland",
+"safe",
+"safety",
+"sakura",
+"sale",
+"salon",
+"samsclub",
+"samsung",
+"sandvik",
+"sandvikcoromant",
+"sanofi",
+"sap",
+"sarl",
+"sas",
+"save",
+"saxo",
+"sbi",
+"sbs",
+"sca",
+"scb",
+"schaeffler",
+"schmidt",
+"scholarships",
+"school",
+"schule",
+"schwarz",
+"science",
+"scjohnson",
+"scor",
+"scot",
+"search",
+"seat",
+"secure",
+"security",
+"seek",
+"select",
+"sener",
+"services",
+"ses",
+"seven",
+"sew",
+"sex",
+"sexy",
+"sfr",
+"shangrila",
+"sharp",
+"shaw",
+"shell",
+"shia",
+"shiksha",
+"shoes",
+"shop",
+"shopping",
+"shouji",
+"show",
+"showtime",
+"shriram",
+"silk",
+"sina",
+"singles",
+"site",
+"ski",
+"skin",
+"sky",
+"skype",
+"sling",
+"smart",
+"smile",
+"sncf",
+"soccer",
+"social",
+"softbank",
+"software",
+"sohu",
+"solar",
+"solutions",
+"song",
+"sony",
+"soy",
+"spa",
+"space",
+"sport",
+"spot",
+"spreadbetting",
+"srl",
+"stada",
+"staples",
+"star",
+"statebank",
+"statefarm",
+"stc",
+"stcgroup",
+"stockholm",
+"storage",
+"store",
+"stream",
+"studio",
+"study",
+"style",
+"sucks",
+"supplies",
+"supply",
+"support",
+"surf",
+"surgery",
+"suzuki",
+"swatch",
+"swiftcover",
+"swiss",
+"sydney",
+"symantec",
+"systems",
+"tab",
+"taipei",
+"talk",
+"taobao",
+"target",
+"tatamotors",
+"tatar",
+"tattoo",
+"tax",
+"taxi",
+"tci",
+"tdk",
+"team",
+"tech",
+"technology",
+"temasek",
+"tennis",
+"teva",
+"thd",
+"theater",
+"theatre",
+"tiaa",
+"tickets",
+"tienda",
+"tiffany",
+"tips",
+"tires",
+"tirol",
+"tjmaxx",
+"tjx",
+"tkmaxx",
+"tmall",
+"today",
+"tokyo",
+"tools",
+"top",
+"toray",
+"toshiba",
+"total",
+"tours",
+"town",
+"toyota",
+"toys",
+"trade",
+"trading",
+"training",
+"travel",
+"travelchannel",
+"travelers",
+"travelersinsurance",
+"trust",
+"trv",
+"tube",
+"tui",
+"tunes",
+"tushu",
+"tvs",
+"ubank",
+"ubs",
+"unicom",
+"university",
+"uno",
+"uol",
+"ups",
+"vacations",
+"vana",
+"vanguard",
+"vegas",
+"ventures",
+"verisign",
+"versicherung",
+"vet",
+"viajes",
+"video",
+"vig",
+"viking",
+"villas",
+"vin",
+"vip",
+"virgin",
+"visa",
+"vision",
+"viva",
+"vivo",
+"vlaanderen",
+"vodka",
+"volkswagen",
+"volvo",
+"vote",
+"voting",
+"voto",
+"voyage",
+"vuelos",
+"wales",
+"walmart",
+"walter",
+"wang",
+"wanggou",
+"watch",
+"watches",
+"weather",
+"weatherchannel",
+"webcam",
+"weber",
+"website",
+"wed",
+"wedding",
+"weibo",
+"weir",
+"whoswho",
+"wien",
+"wiki",
+"williamhill",
+"win",
+"windows",
+"wine",
+"winners",
+"wme",
+"wolterskluwer",
+"woodside",
+"work",
+"works",
+"world",
+"wow",
+"wtc",
+"wtf",
+"xbox",
+"xerox",
+"xfinity",
+"xihuan",
+"xin",
+"कॉम",
+"セール",
+"佛山",
+"慈善",
+"集团",
+"在线",
+"大众汽车",
+"点看",
+"คอม",
+"八卦",
+"موقع",
+"公益",
+"公司",
+"香格里拉",
+"网站",
+"移动",
+"我爱你",
+"москва",
+"католик",
+"онлайн",
+"сайт",
+"联通",
+"קום",
+"时尚",
+"微博",
+"淡马锡",
+"ファッション",
+"орг",
+"नेट",
+"ストア",
+"アマゾン",
+"삼성",
+"商标",
+"商店",
+"商城",
+"дети",
+"ポイント",
+"新闻",
+"工行",
+"家電",
+"كوم",
+"中文网",
+"中信",
+"娱乐",
+"谷歌",
+"電訊盈科",
+"购物",
+"クラウド",
+"通販",
+"网店",
+"संगठन",
+"餐厅",
+"网络",
+"ком",
+"亚马逊",
+"诺基亚",
+"食品",
+"飞利浦",
+"手表",
+"手机",
+"ارامكو",
+"العليان",
+"اتصالات",
+"بازار",
+"ابوظبي",
+"كاثوليك",
+"همراه",
+"닷컴",
+"政府",
+"شبكة",
+"بيتك",
+"عرب",
+"机构",
+"组织机构",
+"健康",
+"招聘",
+"рус",
+"珠宝",
+"大拿",
+"みんな",
+"グーグル",
+"世界",
+"書籍",
+"网址",
+"닷넷",
+"コム",
+"天主教",
+"游戏",
+"vermögensberater",
+"vermögensberatung",
+"企业",
+"信息",
+"嘉里大酒店",
+"嘉里",
+"广东",
+"政务",
+"xyz",
+"yachts",
+"yahoo",
+"yamaxun",
+"yandex",
+"yodobashi",
+"yoga",
+"yokohama",
+"you",
+"youtube",
+"yun",
+"zappos",
+"zara",
+"zero",
+"zip",
+"zone",
+"zuerich",
+"cc.ua",
+"inf.ua",
+"ltd.ua",
+"adobeaemcloud.com",
+"adobeaemcloud.net",
+"*.dev.adobeaemcloud.com",
+"beep.pl",
+"barsy.ca",
+"*.compute.estate",
+"*.alces.network",
+"altervista.org",
+"alwaysdata.net",
+"cloudfront.net",
+"*.compute.amazonaws.com",
+"*.compute-1.amazonaws.com",
+"*.compute.amazonaws.com.cn",
+"us-east-1.amazonaws.com",
+"cn-north-1.eb.amazonaws.com.cn",
+"cn-northwest-1.eb.amazonaws.com.cn",
+"elasticbeanstalk.com",
+"ap-northeast-1.elasticbeanstalk.com",
+"ap-northeast-2.elasticbeanstalk.com",
+"ap-northeast-3.elasticbeanstalk.com",
+"ap-south-1.elasticbeanstalk.com",
+"ap-southeast-1.elasticbeanstalk.com",
+"ap-southeast-2.elasticbeanstalk.com",
+"ca-central-1.elasticbeanstalk.com",
+"eu-central-1.elasticbeanstalk.com",
+"eu-west-1.elasticbeanstalk.com",
+"eu-west-2.elasticbeanstalk.com",
+"eu-west-3.elasticbeanstalk.com",
+"sa-east-1.elasticbeanstalk.com",
+"us-east-1.elasticbeanstalk.com",
+"us-east-2.elasticbeanstalk.com",
+"us-gov-west-1.elasticbeanstalk.com",
+"us-west-1.elasticbeanstalk.com",
+"us-west-2.elasticbeanstalk.com",
+"*.elb.amazonaws.com",
+"*.elb.amazonaws.com.cn",
+"s3.amazonaws.com",
+"s3-ap-northeast-1.amazonaws.com",
+"s3-ap-northeast-2.amazonaws.com",
+"s3-ap-south-1.amazonaws.com",
+"s3-ap-southeast-1.amazonaws.com",
+"s3-ap-southeast-2.amazonaws.com",
+"s3-ca-central-1.amazonaws.com",
+"s3-eu-central-1.amazonaws.com",
+"s3-eu-west-1.amazonaws.com",
+"s3-eu-west-2.amazonaws.com",
+"s3-eu-west-3.amazonaws.com",
+"s3-external-1.amazonaws.com",
+"s3-fips-us-gov-west-1.amazonaws.com",
+"s3-sa-east-1.amazonaws.com",
+"s3-us-gov-west-1.amazonaws.com",
+"s3-us-east-2.amazonaws.com",
+"s3-us-west-1.amazonaws.com",
+"s3-us-west-2.amazonaws.com",
+"s3.ap-northeast-2.amazonaws.com",
+"s3.ap-south-1.amazonaws.com",
+"s3.cn-north-1.amazonaws.com.cn",
+"s3.ca-central-1.amazonaws.com",
+"s3.eu-central-1.amazonaws.com",
+"s3.eu-west-2.amazonaws.com",
+"s3.eu-west-3.amazonaws.com",
+"s3.us-east-2.amazonaws.com",
+"s3.dualstack.ap-northeast-1.amazonaws.com",
+"s3.dualstack.ap-northeast-2.amazonaws.com",
+"s3.dualstack.ap-south-1.amazonaws.com",
+"s3.dualstack.ap-southeast-1.amazonaws.com",
+"s3.dualstack.ap-southeast-2.amazonaws.com",
+"s3.dualstack.ca-central-1.amazonaws.com",
+"s3.dualstack.eu-central-1.amazonaws.com",
+"s3.dualstack.eu-west-1.amazonaws.com",
+"s3.dualstack.eu-west-2.amazonaws.com",
+"s3.dualstack.eu-west-3.amazonaws.com",
+"s3.dualstack.sa-east-1.amazonaws.com",
+"s3.dualstack.us-east-1.amazonaws.com",
+"s3.dualstack.us-east-2.amazonaws.com",
+"s3-website-us-east-1.amazonaws.com",
+"s3-website-us-west-1.amazonaws.com",
+"s3-website-us-west-2.amazonaws.com",
+"s3-website-ap-northeast-1.amazonaws.com",
+"s3-website-ap-southeast-1.amazonaws.com",
+"s3-website-ap-southeast-2.amazonaws.com",
+"s3-website-eu-west-1.amazonaws.com",
+"s3-website-sa-east-1.amazonaws.com",
+"s3-website.ap-northeast-2.amazonaws.com",
+"s3-website.ap-south-1.amazonaws.com",
+"s3-website.ca-central-1.amazonaws.com",
+"s3-website.eu-central-1.amazonaws.com",
+"s3-website.eu-west-2.amazonaws.com",
+"s3-website.eu-west-3.amazonaws.com",
+"s3-website.us-east-2.amazonaws.com",
+"amsw.nl",
+"t3l3p0rt.net",
+"tele.amune.org",
+"apigee.io",
+"on-aptible.com",
+"user.aseinet.ne.jp",
+"gv.vc",
+"d.gv.vc",
+"user.party.eus",
+"pimienta.org",
+"poivron.org",
+"potager.org",
+"sweetpepper.org",
+"myasustor.com",
+"myfritz.net",
+"*.awdev.ca",
+"*.advisor.ws",
+"b-data.io",
+"backplaneapp.io",
+"balena-devices.com",
+"app.banzaicloud.io",
+"betainabox.com",
+"bnr.la",
+"blackbaudcdn.net",
+"boomla.net",
+"boxfuse.io",
+"square7.ch",
+"bplaced.com",
+"bplaced.de",
+"square7.de",
+"bplaced.net",
+"square7.net",
+"browsersafetymark.io",
+"uk0.bigv.io",
+"dh.bytemark.co.uk",
+"vm.bytemark.co.uk",
+"mycd.eu",
+"carrd.co",
+"crd.co",
+"uwu.ai",
+"ae.org",
+"ar.com",
+"br.com",
+"cn.com",
+"com.de",
+"com.se",
+"de.com",
+"eu.com",
+"gb.com",
+"gb.net",
+"hu.com",
+"hu.net",
+"jp.net",
+"jpn.com",
+"kr.com",
+"mex.com",
+"no.com",
+"qc.com",
+"ru.com",
+"sa.com",
+"se.net",
+"uk.com",
+"uk.net",
+"us.com",
+"uy.com",
+"za.bz",
+"za.com",
+"africa.com",
+"gr.com",
+"in.net",
+"us.org",
+"co.com",
+"c.la",
+"certmgr.org",
+"xenapponazure.com",
+"discourse.group",
+"discourse.team",
+"virtueeldomein.nl",
+"cleverapps.io",
+"*.lcl.dev",
+"*.stg.dev",
+"c66.me",
+"cloud66.ws",
+"cloud66.zone",
+"jdevcloud.com",
+"wpdevcloud.com",
+"cloudaccess.host",
+"freesite.host",
+"cloudaccess.net",
+"cloudcontrolled.com",
+"cloudcontrolapp.com",
+"cloudera.site",
+"trycloudflare.com",
+"workers.dev",
+"wnext.app",
+"co.ca",
+"*.otap.co",
+"co.cz",
+"c.cdn77.org",
+"cdn77-ssl.net",
+"r.cdn77.net",
+"rsc.cdn77.org",
+"ssl.origin.cdn77-secure.org",
+"cloudns.asia",
+"cloudns.biz",
+"cloudns.club",
+"cloudns.cc",
+"cloudns.eu",
+"cloudns.in",
+"cloudns.info",
+"cloudns.org",
+"cloudns.pro",
+"cloudns.pw",
+"cloudns.us",
+"cloudeity.net",
+"cnpy.gdn",
+"co.nl",
+"co.no",
+"webhosting.be",
+"hosting-cluster.nl",
+"ac.ru",
+"edu.ru",
+"gov.ru",
+"int.ru",
+"mil.ru",
+"test.ru",
+"dyn.cosidns.de",
+"dynamisches-dns.de",
+"dnsupdater.de",
+"internet-dns.de",
+"l-o-g-i-n.de",
+"dynamic-dns.info",
+"feste-ip.net",
+"knx-server.net",
+"static-access.net",
+"realm.cz",
+"*.cryptonomic.net",
+"cupcake.is",
+"*.customer-oci.com",
+"*.oci.customer-oci.com",
+"*.ocp.customer-oci.com",
+"*.ocs.customer-oci.com",
+"cyon.link",
+"cyon.site",
+"daplie.me",
+"localhost.daplie.me",
+"dattolocal.com",
+"dattorelay.com",
+"dattoweb.com",
+"mydatto.com",
+"dattolocal.net",
+"mydatto.net",
+"biz.dk",
+"co.dk",
+"firm.dk",
+"reg.dk",
+"store.dk",
+"*.dapps.earth",
+"*.bzz.dapps.earth",
+"builtwithdark.com",
+"edgestack.me",
+"debian.net",
+"dedyn.io",
+"dnshome.de",
+"online.th",
+"shop.th",
+"drayddns.com",
+"dreamhosters.com",
+"mydrobo.com",
+"drud.io",
+"drud.us",
+"duckdns.org",
+"dy.fi",
+"tunk.org",
+"dyndns-at-home.com",
+"dyndns-at-work.com",
+"dyndns-blog.com",
+"dyndns-free.com",
+"dyndns-home.com",
+"dyndns-ip.com",
+"dyndns-mail.com",
+"dyndns-office.com",
+"dyndns-pics.com",
+"dyndns-remote.com",
+"dyndns-server.com",
+"dyndns-web.com",
+"dyndns-wiki.com",
+"dyndns-work.com",
+"dyndns.biz",
+"dyndns.info",
+"dyndns.org",
+"dyndns.tv",
+"at-band-camp.net",
+"ath.cx",
+"barrel-of-knowledge.info",
+"barrell-of-knowledge.info",
+"better-than.tv",
+"blogdns.com",
+"blogdns.net",
+"blogdns.org",
+"blogsite.org",
+"boldlygoingnowhere.org",
+"broke-it.net",
+"buyshouses.net",
+"cechire.com",
+"dnsalias.com",
+"dnsalias.net",
+"dnsalias.org",
+"dnsdojo.com",
+"dnsdojo.net",
+"dnsdojo.org",
+"does-it.net",
+"doesntexist.com",
+"doesntexist.org",
+"dontexist.com",
+"dontexist.net",
+"dontexist.org",
+"doomdns.com",
+"doomdns.org",
+"dvrdns.org",
+"dyn-o-saur.com",
+"dynalias.com",
+"dynalias.net",
+"dynalias.org",
+"dynathome.net",
+"dyndns.ws",
+"endofinternet.net",
+"endofinternet.org",
+"endoftheinternet.org",
+"est-a-la-maison.com",
+"est-a-la-masion.com",
+"est-le-patron.com",
+"est-mon-blogueur.com",
+"for-better.biz",
+"for-more.biz",
+"for-our.info",
+"for-some.biz",
+"for-the.biz",
+"forgot.her.name",
+"forgot.his.name",
+"from-ak.com",
+"from-al.com",
+"from-ar.com",
+"from-az.net",
+"from-ca.com",
+"from-co.net",
+"from-ct.com",
+"from-dc.com",
+"from-de.com",
+"from-fl.com",
+"from-ga.com",
+"from-hi.com",
+"from-ia.com",
+"from-id.com",
+"from-il.com",
+"from-in.com",
+"from-ks.com",
+"from-ky.com",
+"from-la.net",
+"from-ma.com",
+"from-md.com",
+"from-me.org",
+"from-mi.com",
+"from-mn.com",
+"from-mo.com",
+"from-ms.com",
+"from-mt.com",
+"from-nc.com",
+"from-nd.com",
+"from-ne.com",
+"from-nh.com",
+"from-nj.com",
+"from-nm.com",
+"from-nv.com",
+"from-ny.net",
+"from-oh.com",
+"from-ok.com",
+"from-or.com",
+"from-pa.com",
+"from-pr.com",
+"from-ri.com",
+"from-sc.com",
+"from-sd.com",
+"from-tn.com",
+"from-tx.com",
+"from-ut.com",
+"from-va.com",
+"from-vt.com",
+"from-wa.com",
+"from-wi.com",
+"from-wv.com",
+"from-wy.com",
+"ftpaccess.cc",
+"fuettertdasnetz.de",
+"game-host.org",
+"game-server.cc",
+"getmyip.com",
+"gets-it.net",
+"go.dyndns.org",
+"gotdns.com",
+"gotdns.org",
+"groks-the.info",
+"groks-this.info",
+"ham-radio-op.net",
+"here-for-more.info",
+"hobby-site.com",
+"hobby-site.org",
+"home.dyndns.org",
+"homedns.org",
+"homeftp.net",
+"homeftp.org",
+"homeip.net",
+"homelinux.com",
+"homelinux.net",
+"homelinux.org",
+"homeunix.com",
+"homeunix.net",
+"homeunix.org",
+"iamallama.com",
+"in-the-band.net",
+"is-a-anarchist.com",
+"is-a-blogger.com",
+"is-a-bookkeeper.com",
+"is-a-bruinsfan.org",
+"is-a-bulls-fan.com",
+"is-a-candidate.org",
+"is-a-caterer.com",
+"is-a-celticsfan.org",
+"is-a-chef.com",
+"is-a-chef.net",
+"is-a-chef.org",
+"is-a-conservative.com",
+"is-a-cpa.com",
+"is-a-cubicle-slave.com",
+"is-a-democrat.com",
+"is-a-designer.com",
+"is-a-doctor.com",
+"is-a-financialadvisor.com",
+"is-a-geek.com",
+"is-a-geek.net",
+"is-a-geek.org",
+"is-a-green.com",
+"is-a-guru.com",
+"is-a-hard-worker.com",
+"is-a-hunter.com",
+"is-a-knight.org",
+"is-a-landscaper.com",
+"is-a-lawyer.com",
+"is-a-liberal.com",
+"is-a-libertarian.com",
+"is-a-linux-user.org",
+"is-a-llama.com",
+"is-a-musician.com",
+"is-a-nascarfan.com",
+"is-a-nurse.com",
+"is-a-painter.com",
+"is-a-patsfan.org",
+"is-a-personaltrainer.com",
+"is-a-photographer.com",
+"is-a-player.com",
+"is-a-republican.com",
+"is-a-rockstar.com",
+"is-a-socialist.com",
+"is-a-soxfan.org",
+"is-a-student.com",
+"is-a-teacher.com",
+"is-a-techie.com",
+"is-a-therapist.com",
+"is-an-accountant.com",
+"is-an-actor.com",
+"is-an-actress.com",
+"is-an-anarchist.com",
+"is-an-artist.com",
+"is-an-engineer.com",
+"is-an-entertainer.com",
+"is-by.us",
+"is-certified.com",
+"is-found.org",
+"is-gone.com",
+"is-into-anime.com",
+"is-into-cars.com",
+"is-into-cartoons.com",
+"is-into-games.com",
+"is-leet.com",
+"is-lost.org",
+"is-not-certified.com",
+"is-saved.org",
+"is-slick.com",
+"is-uberleet.com",
+"is-very-bad.org",
+"is-very-evil.org",
+"is-very-good.org",
+"is-very-nice.org",
+"is-very-sweet.org",
+"is-with-theband.com",
+"isa-geek.com",
+"isa-geek.net",
+"isa-geek.org",
+"isa-hockeynut.com",
+"issmarterthanyou.com",
+"isteingeek.de",
+"istmein.de",
+"kicks-ass.net",
+"kicks-ass.org",
+"knowsitall.info",
+"land-4-sale.us",
+"lebtimnetz.de",
+"leitungsen.de",
+"likes-pie.com",
+"likescandy.com",
+"merseine.nu",
+"mine.nu",
+"misconfused.org",
+"mypets.ws",
+"myphotos.cc",
+"neat-url.com",
+"office-on-the.net",
+"on-the-web.tv",
+"podzone.net",
+"podzone.org",
+"readmyblog.org",
+"saves-the-whales.com",
+"scrapper-site.net",
+"scrapping.cc",
+"selfip.biz",
+"selfip.com",
+"selfip.info",
+"selfip.net",
+"selfip.org",
+"sells-for-less.com",
+"sells-for-u.com",
+"sells-it.net",
+"sellsyourhome.org",
+"servebbs.com",
+"servebbs.net",
+"servebbs.org",
+"serveftp.net",
+"serveftp.org",
+"servegame.org",
+"shacknet.nu",
+"simple-url.com",
+"space-to-rent.com",
+"stuff-4-sale.org",
+"stuff-4-sale.us",
+"teaches-yoga.com",
+"thruhere.net",
+"traeumtgerade.de",
+"webhop.biz",
+"webhop.info",
+"webhop.net",
+"webhop.org",
+"worse-than.tv",
+"writesthisblog.com",
+"ddnss.de",
+"dyn.ddnss.de",
+"dyndns.ddnss.de",
+"dyndns1.de",
+"dyn-ip24.de",
+"home-webserver.de",
+"dyn.home-webserver.de",
+"myhome-server.de",
+"ddnss.org",
+"definima.net",
+"definima.io",
+"bci.dnstrace.pro",
+"ddnsfree.com",
+"ddnsgeek.com",
+"giize.com",
+"gleeze.com",
+"kozow.com",
+"loseyourip.com",
+"ooguy.com",
+"theworkpc.com",
+"casacam.net",
+"dynu.net",
+"accesscam.org",
+"camdvr.org",
+"freeddns.org",
+"mywire.org",
+"webredirect.org",
+"myddns.rocks",
+"blogsite.xyz",
+"dynv6.net",
+"e4.cz",
+"en-root.fr",
+"mytuleap.com",
+"onred.one",
+"staging.onred.one",
+"enonic.io",
+"customer.enonic.io",
+"eu.org",
+"al.eu.org",
+"asso.eu.org",
+"at.eu.org",
+"au.eu.org",
+"be.eu.org",
+"bg.eu.org",
+"ca.eu.org",
+"cd.eu.org",
+"ch.eu.org",
+"cn.eu.org",
+"cy.eu.org",
+"cz.eu.org",
+"de.eu.org",
+"dk.eu.org",
+"edu.eu.org",
+"ee.eu.org",
+"es.eu.org",
+"fi.eu.org",
+"fr.eu.org",
+"gr.eu.org",
+"hr.eu.org",
+"hu.eu.org",
+"ie.eu.org",
+"il.eu.org",
+"in.eu.org",
+"int.eu.org",
+"is.eu.org",
+"it.eu.org",
+"jp.eu.org",
+"kr.eu.org",
+"lt.eu.org",
+"lu.eu.org",
+"lv.eu.org",
+"mc.eu.org",
+"me.eu.org",
+"mk.eu.org",
+"mt.eu.org",
+"my.eu.org",
+"net.eu.org",
+"ng.eu.org",
+"nl.eu.org",
+"no.eu.org",
+"nz.eu.org",
+"paris.eu.org",
+"pl.eu.org",
+"pt.eu.org",
+"q-a.eu.org",
+"ro.eu.org",
+"ru.eu.org",
+"se.eu.org",
+"si.eu.org",
+"sk.eu.org",
+"tr.eu.org",
+"uk.eu.org",
+"us.eu.org",
+"eu-1.evennode.com",
+"eu-2.evennode.com",
+"eu-3.evennode.com",
+"eu-4.evennode.com",
+"us-1.evennode.com",
+"us-2.evennode.com",
+"us-3.evennode.com",
+"us-4.evennode.com",
+"twmail.cc",
+"twmail.net",
+"twmail.org",
+"mymailer.com.tw",
+"url.tw",
+"apps.fbsbx.com",
+"ru.net",
+"adygeya.ru",
+"bashkiria.ru",
+"bir.ru",
+"cbg.ru",
+"com.ru",
+"dagestan.ru",
+"grozny.ru",
+"kalmykia.ru",
+"kustanai.ru",
+"marine.ru",
+"mordovia.ru",
+"msk.ru",
+"mytis.ru",
+"nalchik.ru",
+"nov.ru",
+"pyatigorsk.ru",
+"spb.ru",
+"vladikavkaz.ru",
+"vladimir.ru",
+"abkhazia.su",
+"adygeya.su",
+"aktyubinsk.su",
+"arkhangelsk.su",
+"armenia.su",
+"ashgabad.su",
+"azerbaijan.su",
+"balashov.su",
+"bashkiria.su",
+"bryansk.su",
+"bukhara.su",
+"chimkent.su",
+"dagestan.su",
+"east-kazakhstan.su",
+"exnet.su",
+"georgia.su",
+"grozny.su",
+"ivanovo.su",
+"jambyl.su",
+"kalmykia.su",
+"kaluga.su",
+"karacol.su",
+"karaganda.su",
+"karelia.su",
+"khakassia.su",
+"krasnodar.su",
+"kurgan.su",
+"kustanai.su",
+"lenug.su",
+"mangyshlak.su",
+"mordovia.su",
+"msk.su",
+"murmansk.su",
+"nalchik.su",
+"navoi.su",
+"north-kazakhstan.su",
+"nov.su",
+"obninsk.su",
+"penza.su",
+"pokrovsk.su",
+"sochi.su",
+"spb.su",
+"tashkent.su",
+"termez.su",
+"togliatti.su",
+"troitsk.su",
+"tselinograd.su",
+"tula.su",
+"tuva.su",
+"vladikavkaz.su",
+"vladimir.su",
+"vologda.su",
+"channelsdvr.net",
+"u.channelsdvr.net",
+"fastly-terrarium.com",
+"fastlylb.net",
+"map.fastlylb.net",
+"freetls.fastly.net",
+"map.fastly.net",
+"a.prod.fastly.net",
+"global.prod.fastly.net",
+"a.ssl.fastly.net",
+"b.ssl.fastly.net",
+"global.ssl.fastly.net",
+"fastpanel.direct",
+"fastvps-server.com",
+"fhapp.xyz",
+"fedorainfracloud.org",
+"fedorapeople.org",
+"cloud.fedoraproject.org",
+"app.os.fedoraproject.org",
+"app.os.stg.fedoraproject.org",
+"mydobiss.com",
+"filegear.me",
+"filegear-au.me",
+"filegear-de.me",
+"filegear-gb.me",
+"filegear-ie.me",
+"filegear-jp.me",
+"filegear-sg.me",
+"firebaseapp.com",
+"flynnhub.com",
+"flynnhosting.net",
+"0e.vc",
+"freebox-os.com",
+"freeboxos.com",
+"fbx-os.fr",
+"fbxos.fr",
+"freebox-os.fr",
+"freeboxos.fr",
+"freedesktop.org",
+"*.futurecms.at",
+"*.ex.futurecms.at",
+"*.in.futurecms.at",
+"futurehosting.at",
+"futuremailing.at",
+"*.ex.ortsinfo.at",
+"*.kunden.ortsinfo.at",
+"*.statics.cloud",
+"service.gov.uk",
+"gehirn.ne.jp",
+"usercontent.jp",
+"gentapps.com",
+"lab.ms",
+"github.io",
+"githubusercontent.com",
+"gitlab.io",
+"glitch.me",
+"lolipop.io",
+"cloudapps.digital",
+"london.cloudapps.digital",
+"homeoffice.gov.uk",
+"ro.im",
+"shop.ro",
+"goip.de",
+"run.app",
+"a.run.app",
+"web.app",
+"*.0emm.com",
+"appspot.com",
+"*.r.appspot.com",
+"blogspot.ae",
+"blogspot.al",
+"blogspot.am",
+"blogspot.ba",
+"blogspot.be",
+"blogspot.bg",
+"blogspot.bj",
+"blogspot.ca",
+"blogspot.cf",
+"blogspot.ch",
+"blogspot.cl",
+"blogspot.co.at",
+"blogspot.co.id",
+"blogspot.co.il",
+"blogspot.co.ke",
+"blogspot.co.nz",
+"blogspot.co.uk",
+"blogspot.co.za",
+"blogspot.com",
+"blogspot.com.ar",
+"blogspot.com.au",
+"blogspot.com.br",
+"blogspot.com.by",
+"blogspot.com.co",
+"blogspot.com.cy",
+"blogspot.com.ee",
+"blogspot.com.eg",
+"blogspot.com.es",
+"blogspot.com.mt",
+"blogspot.com.ng",
+"blogspot.com.tr",
+"blogspot.com.uy",
+"blogspot.cv",
+"blogspot.cz",
+"blogspot.de",
+"blogspot.dk",
+"blogspot.fi",
+"blogspot.fr",
+"blogspot.gr",
+"blogspot.hk",
+"blogspot.hr",
+"blogspot.hu",
+"blogspot.ie",
+"blogspot.in",
+"blogspot.is",
+"blogspot.it",
+"blogspot.jp",
+"blogspot.kr",
+"blogspot.li",
+"blogspot.lt",
+"blogspot.lu",
+"blogspot.md",
+"blogspot.mk",
+"blogspot.mr",
+"blogspot.mx",
+"blogspot.my",
+"blogspot.nl",
+"blogspot.no",
+"blogspot.pe",
+"blogspot.pt",
+"blogspot.qa",
+"blogspot.re",
+"blogspot.ro",
+"blogspot.rs",
+"blogspot.ru",
+"blogspot.se",
+"blogspot.sg",
+"blogspot.si",
+"blogspot.sk",
+"blogspot.sn",
+"blogspot.td",
+"blogspot.tw",
+"blogspot.ug",
+"blogspot.vn",
+"cloudfunctions.net",
+"cloud.goog",
+"codespot.com",
+"googleapis.com",
+"googlecode.com",
+"pagespeedmobilizer.com",
+"publishproxy.com",
+"withgoogle.com",
+"withyoutube.com",
+"awsmppl.com",
+"fin.ci",
+"free.hr",
+"caa.li",
+"ua.rs",
+"conf.se",
+"hs.zone",
+"hs.run",
+"hashbang.sh",
+"hasura.app",
+"hasura-app.io",
+"hepforge.org",
+"herokuapp.com",
+"herokussl.com",
+"myravendb.com",
+"ravendb.community",
+"ravendb.me",
+"development.run",
+"ravendb.run",
+"bpl.biz",
+"orx.biz",
+"ng.city",
+"biz.gl",
+"ng.ink",
+"col.ng",
+"firm.ng",
+"gen.ng",
+"ltd.ng",
+"ngo.ng",
+"ng.school",
+"sch.so",
+"häkkinen.fi",
+"*.moonscale.io",
+"moonscale.net",
+"iki.fi",
+"dyn-berlin.de",
+"in-berlin.de",
+"in-brb.de",
+"in-butter.de",
+"in-dsl.de",
+"in-dsl.net",
+"in-dsl.org",
+"in-vpn.de",
+"in-vpn.net",
+"in-vpn.org",
+"biz.at",
+"info.at",
+"info.cx",
+"ac.leg.br",
+"al.leg.br",
+"am.leg.br",
+"ap.leg.br",
+"ba.leg.br",
+"ce.leg.br",
+"df.leg.br",
+"es.leg.br",
+"go.leg.br",
+"ma.leg.br",
+"mg.leg.br",
+"ms.leg.br",
+"mt.leg.br",
+"pa.leg.br",
+"pb.leg.br",
+"pe.leg.br",
+"pi.leg.br",
+"pr.leg.br",
+"rj.leg.br",
+"rn.leg.br",
+"ro.leg.br",
+"rr.leg.br",
+"rs.leg.br",
+"sc.leg.br",
+"se.leg.br",
+"sp.leg.br",
+"to.leg.br",
+"pixolino.com",
+"ipifony.net",
+"mein-iserv.de",
+"test-iserv.de",
+"iserv.dev",
+"iobb.net",
+"myjino.ru",
+"*.hosting.myjino.ru",
+"*.landing.myjino.ru",
+"*.spectrum.myjino.ru",
+"*.vps.myjino.ru",
+"*.triton.zone",
+"*.cns.joyent.com",
+"js.org",
+"kaas.gg",
+"khplay.nl",
+"keymachine.de",
+"kinghost.net",
+"uni5.net",
+"knightpoint.systems",
+"oya.to",
+"co.krd",
+"edu.krd",
+"git-repos.de",
+"lcube-server.de",
+"svn-repos.de",
+"leadpages.co",
+"lpages.co",
+"lpusercontent.com",
+"lelux.site",
+"co.business",
+"co.education",
+"co.events",
+"co.financial",
+"co.network",
+"co.place",
+"co.technology",
+"app.lmpm.com",
+"linkitools.space",
+"linkyard.cloud",
+"linkyard-cloud.ch",
+"members.linode.com",
+"nodebalancer.linode.com",
+"we.bs",
+"loginline.app",
+"loginline.dev",
+"loginline.io",
+"loginline.services",
+"loginline.site",
+"krasnik.pl",
+"leczna.pl",
+"lubartow.pl",
+"lublin.pl",
+"poniatowa.pl",
+"swidnik.pl",
+"uklugs.org",
+"glug.org.uk",
+"lug.org.uk",
+"lugs.org.uk",
+"barsy.bg",
+"barsy.co.uk",
+"barsyonline.co.uk",
+"barsycenter.com",
+"barsyonline.com",
+"barsy.club",
+"barsy.de",
+"barsy.eu",
+"barsy.in",
+"barsy.info",
+"barsy.io",
+"barsy.me",
+"barsy.menu",
+"barsy.mobi",
+"barsy.net",
+"barsy.online",
+"barsy.org",
+"barsy.pro",
+"barsy.pub",
+"barsy.shop",
+"barsy.site",
+"barsy.support",
+"barsy.uk",
+"*.magentosite.cloud",
+"mayfirst.info",
+"mayfirst.org",
+"hb.cldmail.ru",
+"miniserver.com",
+"memset.net",
+"cloud.metacentrum.cz",
+"custom.metacentrum.cz",
+"flt.cloud.muni.cz",
+"usr.cloud.muni.cz",
+"meteorapp.com",
+"eu.meteorapp.com",
+"co.pl",
+"azurecontainer.io",
+"azurewebsites.net",
+"azure-mobile.net",
+"cloudapp.net",
+"mozilla-iot.org",
+"bmoattachments.org",
+"net.ru",
+"org.ru",
+"pp.ru",
+"ui.nabu.casa",
+"pony.club",
+"of.fashion",
+"on.fashion",
+"of.football",
+"in.london",
+"of.london",
+"for.men",
+"and.mom",
+"for.mom",
+"for.one",
+"for.sale",
+"of.work",
+"to.work",
+"nctu.me",
+"bitballoon.com",
+"netlify.com",
+"4u.com",
+"ngrok.io",
+"nh-serv.co.uk",
+"nfshost.com",
+"dnsking.ch",
+"mypi.co",
+"n4t.co",
+"001www.com",
+"ddnslive.com",
+"myiphost.com",
+"forumz.info",
+"16-b.it",
+"32-b.it",
+"64-b.it",
+"soundcast.me",
+"tcp4.me",
+"dnsup.net",
+"hicam.net",
+"now-dns.net",
+"ownip.net",
+"vpndns.net",
+"dynserv.org",
+"now-dns.org",
+"x443.pw",
+"now-dns.top",
+"ntdll.top",
+"freeddns.us",
+"crafting.xyz",
+"zapto.xyz",
+"nsupdate.info",
+"nerdpol.ovh",
+"blogsyte.com",
+"brasilia.me",
+"cable-modem.org",
+"ciscofreak.com",
+"collegefan.org",
+"couchpotatofries.org",
+"damnserver.com",
+"ddns.me",
+"ditchyourip.com",
+"dnsfor.me",
+"dnsiskinky.com",
+"dvrcam.info",
+"dynns.com",
+"eating-organic.net",
+"fantasyleague.cc",
+"geekgalaxy.com",
+"golffan.us",
+"health-carereform.com",
+"homesecuritymac.com",
+"homesecuritypc.com",
+"hopto.me",
+"ilovecollege.info",
+"loginto.me",
+"mlbfan.org",
+"mmafan.biz",
+"myactivedirectory.com",
+"mydissent.net",
+"myeffect.net",
+"mymediapc.net",
+"mypsx.net",
+"mysecuritycamera.com",
+"mysecuritycamera.net",
+"mysecuritycamera.org",
+"net-freaks.com",
+"nflfan.org",
+"nhlfan.net",
+"no-ip.ca",
+"no-ip.co.uk",
+"no-ip.net",
+"noip.us",
+"onthewifi.com",
+"pgafan.net",
+"point2this.com",
+"pointto.us",
+"privatizehealthinsurance.net",
+"quicksytes.com",
+"read-books.org",
+"securitytactics.com",
+"serveexchange.com",
+"servehumour.com",
+"servep2p.com",
+"servesarcasm.com",
+"stufftoread.com",
+"ufcfan.org",
+"unusualperson.com",
+"workisboring.com",
+"3utilities.com",
+"bounceme.net",
+"ddns.net",
+"ddnsking.com",
+"gotdns.ch",
+"hopto.org",
+"myftp.biz",
+"myftp.org",
+"myvnc.com",
+"no-ip.biz",
+"no-ip.info",
+"no-ip.org",
+"noip.me",
+"redirectme.net",
+"servebeer.com",
+"serveblog.net",
+"servecounterstrike.com",
+"serveftp.com",
+"servegame.com",
+"servehalflife.com",
+"servehttp.com",
+"serveirc.com",
+"serveminecraft.net",
+"servemp3.com",
+"servepics.com",
+"servequake.com",
+"sytes.net",
+"webhop.me",
+"zapto.org",
+"stage.nodeart.io",
+"nodum.co",
+"nodum.io",
+"pcloud.host",
+"nyc.mn",
+"nom.ae",
+"nom.af",
+"nom.ai",
+"nom.al",
+"nym.by",
+"nom.bz",
+"nym.bz",
+"nom.cl",
+"nym.ec",
+"nom.gd",
+"nom.ge",
+"nom.gl",
+"nym.gr",
+"nom.gt",
+"nym.gy",
+"nym.hk",
+"nom.hn",
+"nym.ie",
+"nom.im",
+"nom.ke",
+"nym.kz",
+"nym.la",
+"nym.lc",
+"nom.li",
+"nym.li",
+"nym.lt",
+"nym.lu",
+"nom.lv",
+"nym.me",
+"nom.mk",
+"nym.mn",
+"nym.mx",
+"nom.nu",
+"nym.nz",
+"nym.pe",
+"nym.pt",
+"nom.pw",
+"nom.qa",
+"nym.ro",
+"nom.rs",
+"nom.si",
+"nym.sk",
+"nom.st",
+"nym.su",
+"nym.sx",
+"nom.tj",
+"nym.tw",
+"nom.ug",
+"nom.uy",
+"nom.vc",
+"nom.vg",
+"static.observableusercontent.com",
+"cya.gg",
+"cloudycluster.net",
+"nid.io",
+"opencraft.hosting",
+"operaunite.com",
+"skygearapp.com",
+"outsystemscloud.com",
+"ownprovider.com",
+"own.pm",
+"ox.rs",
+"oy.lc",
+"pgfog.com",
+"pagefrontapp.com",
+"art.pl",
+"gliwice.pl",
+"krakow.pl",
+"poznan.pl",
+"wroc.pl",
+"zakopane.pl",
+"pantheonsite.io",
+"gotpantheon.com",
+"mypep.link",
+"perspecta.cloud",
+"on-web.fr",
+"*.platform.sh",
+"*.platformsh.site",
+"dyn53.io",
+"co.bn",
+"xen.prgmr.com",
+"priv.at",
+"prvcy.page",
+"*.dweb.link",
+"protonet.io",
+"chirurgiens-dentistes-en-france.fr",
+"byen.site",
+"pubtls.org",
+"qualifioapp.com",
+"qbuser.com",
+"instantcloud.cn",
+"ras.ru",
+"qa2.com",
+"qcx.io",
+"*.sys.qcx.io",
+"dev-myqnapcloud.com",
+"alpha-myqnapcloud.com",
+"myqnapcloud.com",
+"*.quipelements.com",
+"vapor.cloud",
+"vaporcloud.io",
+"rackmaze.com",
+"rackmaze.net",
+"*.on-k3s.io",
+"*.on-rancher.cloud",
+"*.on-rio.io",
+"readthedocs.io",
+"rhcloud.com",
+"app.render.com",
+"onrender.com",
+"repl.co",
+"repl.run",
+"resindevice.io",
+"devices.resinstaging.io",
+"hzc.io",
+"wellbeingzone.eu",
+"ptplus.fit",
+"wellbeingzone.co.uk",
+"git-pages.rit.edu",
+"sandcats.io",
+"logoip.de",
+"logoip.com",
+"schokokeks.net",
+"gov.scot",
+"scrysec.com",
+"firewall-gateway.com",
+"firewall-gateway.de",
+"my-gateway.de",
+"my-router.de",
+"spdns.de",
+"spdns.eu",
+"firewall-gateway.net",
+"my-firewall.org",
+"myfirewall.org",
+"spdns.org",
+"senseering.net",
+"biz.ua",
+"co.ua",
+"pp.ua",
+"shiftedit.io",
+"myshopblocks.com",
+"shopitsite.com",
+"mo-siemens.io",
+"1kapp.com",
+"appchizi.com",
+"applinzi.com",
+"sinaapp.com",
+"vipsinaapp.com",
+"siteleaf.net",
+"bounty-full.com",
+"alpha.bounty-full.com",
+"beta.bounty-full.com",
+"stackhero-network.com",
+"static.land",
+"dev.static.land",
+"sites.static.land",
+"apps.lair.io",
+"*.stolos.io",
+"spacekit.io",
+"customer.speedpartner.de",
+"api.stdlib.com",
+"storj.farm",
+"utwente.io",
+"soc.srcf.net",
+"user.srcf.net",
+"temp-dns.com",
+"applicationcloud.io",
+"scapp.io",
+"*.s5y.io",
+"*.sensiosite.cloud",
+"syncloud.it",
+"diskstation.me",
+"dscloud.biz",
+"dscloud.me",
+"dscloud.mobi",
+"dsmynas.com",
+"dsmynas.net",
+"dsmynas.org",
+"familyds.com",
+"familyds.net",
+"familyds.org",
+"i234.me",
+"myds.me",
+"synology.me",
+"vpnplus.to",
+"direct.quickconnect.to",
+"taifun-dns.de",
+"gda.pl",
+"gdansk.pl",
+"gdynia.pl",
+"med.pl",
+"sopot.pl",
+"edugit.org",
+"telebit.app",
+"telebit.io",
+"*.telebit.xyz",
+"gwiddle.co.uk",
+"thingdustdata.com",
+"cust.dev.thingdust.io",
+"cust.disrec.thingdust.io",
+"cust.prod.thingdust.io",
+"cust.testing.thingdust.io",
+"arvo.network",
+"azimuth.network",
+"bloxcms.com",
+"townnews-staging.com",
+"12hp.at",
+"2ix.at",
+"4lima.at",
+"lima-city.at",
+"12hp.ch",
+"2ix.ch",
+"4lima.ch",
+"lima-city.ch",
+"trafficplex.cloud",
+"de.cool",
+"12hp.de",
+"2ix.de",
+"4lima.de",
+"lima-city.de",
+"1337.pictures",
+"clan.rip",
+"lima-city.rocks",
+"webspace.rocks",
+"lima.zone",
+"*.transurl.be",
+"*.transurl.eu",
+"*.transurl.nl",
+"tuxfamily.org",
+"dd-dns.de",
+"diskstation.eu",
+"diskstation.org",
+"dray-dns.de",
+"draydns.de",
+"dyn-vpn.de",
+"dynvpn.de",
+"mein-vigor.de",
+"my-vigor.de",
+"my-wan.de",
+"syno-ds.de",
+"synology-diskstation.de",
+"synology-ds.de",
+"uber.space",
+"*.uberspace.de",
+"hk.com",
+"hk.org",
+"ltd.hk",
+"inc.hk",
+"virtualuser.de",
+"virtual-user.de",
+"urown.cloud",
+"dnsupdate.info",
+"lib.de.us",
+"2038.io",
+"router.management",
+"v-info.info",
+"voorloper.cloud",
+"v.ua",
+"wafflecell.com",
+"*.webhare.dev",
+"wedeploy.io",
+"wedeploy.me",
+"wedeploy.sh",
+"remotewd.com",
+"wmflabs.org",
+"myforum.community",
+"community-pro.de",
+"diskussionsbereich.de",
+"community-pro.net",
+"meinforum.net",
+"half.host",
+"xnbay.com",
+"u2.xnbay.com",
+"u2-local.xnbay.com",
+"cistron.nl",
+"demon.nl",
+"xs4all.space",
+"yandexcloud.net",
+"storage.yandexcloud.net",
+"website.yandexcloud.net",
+"official.academy",
+"yolasite.com",
+"ybo.faith",
+"yombo.me",
+"homelink.one",
+"ybo.party",
+"ybo.review",
+"ybo.science",
+"ybo.trade",
+"nohost.me",
+"noho.st",
+"za.net",
+"za.org",
+"now.sh",
+"bss.design",
+"basicserver.io",
+"virtualserver.io",
+"enterprisecloud.nu"
+]
\ No newline at end of file
diff --git a/familyark/app/node_modules/psl/dist/psl.js b/familyark/app/node_modules/psl/dist/psl.js
new file mode 100644
index 0000000..f4b9b89
--- /dev/null
+++ b/familyark/app/node_modules/psl/dist/psl.js
@@ -0,0 +1,9645 @@
+(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.psl = f()}})(function(){var define,module,exports;return (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i= punySuffix.length) {
+ // return memo;
+ // }
+ //}
+ return rule;
+ }, null);
+};
+
+
+//
+// Error codes and messages.
+//
+exports.errorCodes = {
+ DOMAIN_TOO_SHORT: 'Domain name too short.',
+ DOMAIN_TOO_LONG: 'Domain name too long. It should be no more than 255 chars.',
+ LABEL_STARTS_WITH_DASH: 'Domain name label can not start with a dash.',
+ LABEL_ENDS_WITH_DASH: 'Domain name label can not end with a dash.',
+ LABEL_TOO_LONG: 'Domain name label should be at most 63 chars long.',
+ LABEL_TOO_SHORT: 'Domain name label should be at least 1 character long.',
+ LABEL_INVALID_CHARS: 'Domain name label can only contain alphanumeric characters or dashes.'
+};
+
+
+//
+// Validate domain name and return an error code if not valid.
+//
+// From wikipedia:
+//
+// Hostnames are composed of a series of labels concatenated with dots, as are all
+// domain names. Each label must be between 1 and 63 characters long, and the
+// entire hostname (including the delimiting dots) has a maximum of 255 chars.
+//
+// Allowed chars:
+//
+// * `a-z`
+// * `0-9`
+// * `-` but not as a starting or ending character
+// * `.` as a separator for the textual portions of a domain name
+//
+// * http://en.wikipedia.org/wiki/Domain_name
+// * http://en.wikipedia.org/wiki/Hostname
+//
+internals.validate = function (input) {
+
+ // Before we can validate we need to take care of IDNs with unicode chars.
+ var ascii = Punycode.toASCII(input);
+
+ if (ascii.length < 1) {
+ return 'DOMAIN_TOO_SHORT';
+ }
+ if (ascii.length > 255) {
+ return 'DOMAIN_TOO_LONG';
+ }
+
+ // Check each part's length and allowed chars.
+ var labels = ascii.split('.');
+ var label;
+
+ for (var i = 0; i < labels.length; ++i) {
+ label = labels[i];
+ if (!label.length) {
+ return 'LABEL_TOO_SHORT';
+ }
+ if (label.length > 63) {
+ return 'LABEL_TOO_LONG';
+ }
+ if (label.charAt(0) === '-') {
+ return 'LABEL_STARTS_WITH_DASH';
+ }
+ if (label.charAt(label.length - 1) === '-') {
+ return 'LABEL_ENDS_WITH_DASH';
+ }
+ if (!/^[a-z0-9\-]+$/.test(label)) {
+ return 'LABEL_INVALID_CHARS';
+ }
+ }
+};
+
+
+//
+// Public API
+//
+
+
+//
+// Parse domain.
+//
+exports.parse = function (input) {
+
+ if (typeof input !== 'string') {
+ throw new TypeError('Domain name must be a string.');
+ }
+
+ // Force domain to lowercase.
+ var domain = input.slice(0).toLowerCase();
+
+ // Handle FQDN.
+ // TODO: Simply remove trailing dot?
+ if (domain.charAt(domain.length - 1) === '.') {
+ domain = domain.slice(0, domain.length - 1);
+ }
+
+ // Validate and sanitise input.
+ var error = internals.validate(domain);
+ if (error) {
+ return {
+ input: input,
+ error: {
+ message: exports.errorCodes[error],
+ code: error
+ }
+ };
+ }
+
+ var parsed = {
+ input: input,
+ tld: null,
+ sld: null,
+ domain: null,
+ subdomain: null,
+ listed: false
+ };
+
+ var domainParts = domain.split('.');
+
+ // Non-Internet TLD
+ if (domainParts[domainParts.length - 1] === 'local') {
+ return parsed;
+ }
+
+ var handlePunycode = function () {
+
+ if (!/xn--/.test(domain)) {
+ return parsed;
+ }
+ if (parsed.domain) {
+ parsed.domain = Punycode.toASCII(parsed.domain);
+ }
+ if (parsed.subdomain) {
+ parsed.subdomain = Punycode.toASCII(parsed.subdomain);
+ }
+ return parsed;
+ };
+
+ var rule = internals.findRule(domain);
+
+ // Unlisted tld.
+ if (!rule) {
+ if (domainParts.length < 2) {
+ return parsed;
+ }
+ parsed.tld = domainParts.pop();
+ parsed.sld = domainParts.pop();
+ parsed.domain = [parsed.sld, parsed.tld].join('.');
+ if (domainParts.length) {
+ parsed.subdomain = domainParts.pop();
+ }
+ return handlePunycode();
+ }
+
+ // At this point we know the public suffix is listed.
+ parsed.listed = true;
+
+ var tldParts = rule.suffix.split('.');
+ var privateParts = domainParts.slice(0, domainParts.length - tldParts.length);
+
+ if (rule.exception) {
+ privateParts.push(tldParts.shift());
+ }
+
+ parsed.tld = tldParts.join('.');
+
+ if (!privateParts.length) {
+ return handlePunycode();
+ }
+
+ if (rule.wildcard) {
+ tldParts.unshift(privateParts.pop());
+ parsed.tld = tldParts.join('.');
+ }
+
+ if (!privateParts.length) {
+ return handlePunycode();
+ }
+
+ parsed.sld = privateParts.pop();
+ parsed.domain = [parsed.sld, parsed.tld].join('.');
+
+ if (privateParts.length) {
+ parsed.subdomain = privateParts.join('.');
+ }
+
+ return handlePunycode();
+};
+
+
+//
+// Get domain.
+//
+exports.get = function (domain) {
+
+ if (!domain) {
+ return null;
+ }
+ return exports.parse(domain).domain || null;
+};
+
+
+//
+// Check whether domain belongs to a known public suffix.
+//
+exports.isValid = function (domain) {
+
+ var parsed = exports.parse(domain);
+ return Boolean(parsed.domain && parsed.listed);
+};
+
+},{"./data/rules.json":1,"punycode":3}],3:[function(require,module,exports){
+(function (global){
+/*! https://mths.be/punycode v1.4.1 by @mathias */
+;(function(root) {
+
+ /** Detect free variables */
+ var freeExports = typeof exports == 'object' && exports &&
+ !exports.nodeType && exports;
+ var freeModule = typeof module == 'object' && module &&
+ !module.nodeType && module;
+ var freeGlobal = typeof global == 'object' && global;
+ if (
+ freeGlobal.global === freeGlobal ||
+ freeGlobal.window === freeGlobal ||
+ freeGlobal.self === freeGlobal
+ ) {
+ root = freeGlobal;
+ }
+
+ /**
+ * The `punycode` object.
+ * @name punycode
+ * @type Object
+ */
+ var punycode,
+
+ /** Highest positive signed 32-bit float value */
+ maxInt = 2147483647, // aka. 0x7FFFFFFF or 2^31-1
+
+ /** Bootstring parameters */
+ base = 36,
+ tMin = 1,
+ tMax = 26,
+ skew = 38,
+ damp = 700,
+ initialBias = 72,
+ initialN = 128, // 0x80
+ delimiter = '-', // '\x2D'
+
+ /** Regular expressions */
+ regexPunycode = /^xn--/,
+ regexNonASCII = /[^\x20-\x7E]/, // unprintable ASCII chars + non-ASCII chars
+ regexSeparators = /[\x2E\u3002\uFF0E\uFF61]/g, // RFC 3490 separators
+
+ /** Error messages */
+ errors = {
+ 'overflow': 'Overflow: input needs wider integers to process',
+ 'not-basic': 'Illegal input >= 0x80 (not a basic code point)',
+ 'invalid-input': 'Invalid input'
+ },
+
+ /** Convenience shortcuts */
+ baseMinusTMin = base - tMin,
+ floor = Math.floor,
+ stringFromCharCode = String.fromCharCode,
+
+ /** Temporary variable */
+ key;
+
+ /*--------------------------------------------------------------------------*/
+
+ /**
+ * A generic error utility function.
+ * @private
+ * @param {String} type The error type.
+ * @returns {Error} Throws a `RangeError` with the applicable error message.
+ */
+ function error(type) {
+ throw new RangeError(errors[type]);
+ }
+
+ /**
+ * A generic `Array#map` utility function.
+ * @private
+ * @param {Array} array The array to iterate over.
+ * @param {Function} callback The function that gets called for every array
+ * item.
+ * @returns {Array} A new array of values returned by the callback function.
+ */
+ function map(array, fn) {
+ var length = array.length;
+ var result = [];
+ while (length--) {
+ result[length] = fn(array[length]);
+ }
+ return result;
+ }
+
+ /**
+ * A simple `Array#map`-like wrapper to work with domain name strings or email
+ * addresses.
+ * @private
+ * @param {String} domain The domain name or email address.
+ * @param {Function} callback The function that gets called for every
+ * character.
+ * @returns {Array} A new string of characters returned by the callback
+ * function.
+ */
+ function mapDomain(string, fn) {
+ var parts = string.split('@');
+ var result = '';
+ if (parts.length > 1) {
+ // In email addresses, only the domain name should be punycoded. Leave
+ // the local part (i.e. everything up to `@`) intact.
+ result = parts[0] + '@';
+ string = parts[1];
+ }
+ // Avoid `split(regex)` for IE8 compatibility. See #17.
+ string = string.replace(regexSeparators, '\x2E');
+ var labels = string.split('.');
+ var encoded = map(labels, fn).join('.');
+ return result + encoded;
+ }
+
+ /**
+ * Creates an array containing the numeric code points of each Unicode
+ * character in the string. While JavaScript uses UCS-2 internally,
+ * this function will convert a pair of surrogate halves (each of which
+ * UCS-2 exposes as separate characters) into a single code point,
+ * matching UTF-16.
+ * @see `punycode.ucs2.encode`
+ * @see <https://mathiasbynens.be/notes/javascript-encoding>
+ * @memberOf punycode.ucs2
+ * @name decode
+ * @param {String} string The Unicode input string (UCS-2).
+ * @returns {Array} The new array of code points.
+ */
+ function ucs2decode(string) {
+ var output = [],
+ counter = 0,
+ length = string.length,
+ value,
+ extra;
+ while (counter < length) {
+ value = string.charCodeAt(counter++);
+ if (value >= 0xD800 && value <= 0xDBFF && counter < length) {
+ // high surrogate, and there is a next character
+ extra = string.charCodeAt(counter++);
+ if ((extra & 0xFC00) == 0xDC00) { // low surrogate
+ output.push(((value & 0x3FF) << 10) + (extra & 0x3FF) + 0x10000);
+ } else {
+ // unmatched surrogate; only append this code unit, in case the next
+ // code unit is the high surrogate of a surrogate pair
+ output.push(value);
+ counter--;
+ }
+ } else {
+ output.push(value);
+ }
+ }
+ return output;
+ }
+
+ /**
+ * Creates a string based on an array of numeric code points.
+ * @see `punycode.ucs2.decode`
+ * @memberOf punycode.ucs2
+ * @name encode
+ * @param {Array} codePoints The array of numeric code points.
+ * @returns {String} The new Unicode string (UCS-2).
+ */
+ function ucs2encode(array) {
+ return map(array, function(value) {
+ var output = '';
+ if (value > 0xFFFF) {
+ value -= 0x10000;
+ output += stringFromCharCode(value >>> 10 & 0x3FF | 0xD800);
+ value = 0xDC00 | value & 0x3FF;
+ }
+ output += stringFromCharCode(value);
+ return output;
+ }).join('');
+ }
+
+ /**
+ * Converts a basic code point into a digit/integer.
+ * @see `digitToBasic()`
+ * @private
+ * @param {Number} codePoint The basic numeric code point value.
+ * @returns {Number} The numeric value of a basic code point (for use in
+ * representing integers) in the range `0` to `base - 1`, or `base` if
+ * the code point does not represent a value.
+ */
+ function basicToDigit(codePoint) {
+ if (codePoint - 48 < 10) {
+ return codePoint - 22;
+ }
+ if (codePoint - 65 < 26) {
+ return codePoint - 65;
+ }
+ if (codePoint - 97 < 26) {
+ return codePoint - 97;
+ }
+ return base;
+ }
+
+ /**
+ * Converts a digit/integer into a basic code point.
+ * @see `basicToDigit()`
+ * @private
+ * @param {Number} digit The numeric value of a basic code point.
+ * @returns {Number} The basic code point whose value (when used for
+ * representing integers) is `digit`, which needs to be in the range
+ * `0` to `base - 1`. If `flag` is non-zero, the uppercase form is
+ * used; else, the lowercase form is used. The behavior is undefined
+ * if `flag` is non-zero and `digit` has no uppercase form.
+ */
+ function digitToBasic(digit, flag) {
+ // 0..25 map to ASCII a..z or A..Z
+ // 26..35 map to ASCII 0..9
+ return digit + 22 + 75 * (digit < 26) - ((flag != 0) << 5);
+ }
+
+ /**
+ * Bias adaptation function as per section 3.4 of RFC 3492.
+ * https://tools.ietf.org/html/rfc3492#section-3.4
+ * @private
+ */
+ function adapt(delta, numPoints, firstTime) {
+ var k = 0;
+ delta = firstTime ? floor(delta / damp) : delta >> 1;
+ delta += floor(delta / numPoints);
+ for (/* no initialization */; delta > baseMinusTMin * tMax >> 1; k += base) {
+ delta = floor(delta / baseMinusTMin);
+ }
+ return floor(k + (baseMinusTMin + 1) * delta / (delta + skew));
+ }
+
+ /**
+ * Converts a Punycode string of ASCII-only symbols to a string of Unicode
+ * symbols.
+ * @memberOf punycode
+ * @param {String} input The Punycode string of ASCII-only symbols.
+ * @returns {String} The resulting string of Unicode symbols.
+ */
+ function decode(input) {
+ // Don't use UCS-2
+ var output = [],
+ inputLength = input.length,
+ out,
+ i = 0,
+ n = initialN,
+ bias = initialBias,
+ basic,
+ j,
+ index,
+ oldi,
+ w,
+ k,
+ digit,
+ t,
+ /** Cached calculation results */
+ baseMinusT;
+
+ // Handle the basic code points: let `basic` be the number of input code
+ // points before the last delimiter, or `0` if there is none, then copy
+ // the first basic code points to the output.
+
+ basic = input.lastIndexOf(delimiter);
+ if (basic < 0) {
+ basic = 0;
+ }
+
+ for (j = 0; j < basic; ++j) {
+ // if it's not a basic code point
+ if (input.charCodeAt(j) >= 0x80) {
+ error('not-basic');
+ }
+ output.push(input.charCodeAt(j));
+ }
+
+ // Main decoding loop: start just after the last delimiter if any basic code
+ // points were copied; start at the beginning otherwise.
+
+ for (index = basic > 0 ? basic + 1 : 0; index < inputLength; /* no final expression */) {
+
+ // `index` is the index of the next character to be consumed.
+ // Decode a generalized variable-length integer into `delta`,
+ // which gets added to `i`. The overflow checking is easier
+ // if we increase `i` as we go, then subtract off its starting
+ // value at the end to obtain `delta`.
+ for (oldi = i, w = 1, k = base; /* no condition */; k += base) {
+
+ if (index >= inputLength) {
+ error('invalid-input');
+ }
+
+ digit = basicToDigit(input.charCodeAt(index++));
+
+ if (digit >= base || digit > floor((maxInt - i) / w)) {
+ error('overflow');
+ }
+
+ i += digit * w;
+ t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias);
+
+ if (digit < t) {
+ break;
+ }
+
+ baseMinusT = base - t;
+ if (w > floor(maxInt / baseMinusT)) {
+ error('overflow');
+ }
+
+ w *= baseMinusT;
+
+ }
+
+ out = output.length + 1;
+ bias = adapt(i - oldi, out, oldi == 0);
+
+ // `i` was supposed to wrap around from `out` to `0`,
+ // incrementing `n` each time, so we'll fix that now:
+ if (floor(i / out) > maxInt - n) {
+ error('overflow');
+ }
+
+ n += floor(i / out);
+ i %= out;
+
+ // Insert `n` at position `i` of the output
+ output.splice(i++, 0, n);
+
+ }
+
+ return ucs2encode(output);
+ }
+
+ /**
+ * Converts a string of Unicode symbols (e.g. a domain name label) to a
+ * Punycode string of ASCII-only symbols.
+ * @memberOf punycode
+ * @param {String} input The string of Unicode symbols.
+ * @returns {String} The resulting Punycode string of ASCII-only symbols.
+ */
+ function encode(input) {
+ var n,
+ delta,
+ handledCPCount,
+ basicLength,
+ bias,
+ j,
+ m,
+ q,
+ k,
+ t,
+ currentValue,
+ output = [],
+ /** `inputLength` will hold the number of code points in `input`. */
+ inputLength,
+ /** Cached calculation results */
+ handledCPCountPlusOne,
+ baseMinusT,
+ qMinusT;
+
+ // Convert the input in UCS-2 to Unicode
+ input = ucs2decode(input);
+
+ // Cache the length
+ inputLength = input.length;
+
+ // Initialize the state
+ n = initialN;
+ delta = 0;
+ bias = initialBias;
+
+ // Handle the basic code points
+ for (j = 0; j < inputLength; ++j) {
+ currentValue = input[j];
+ if (currentValue < 0x80) {
+ output.push(stringFromCharCode(currentValue));
+ }
+ }
+
+ handledCPCount = basicLength = output.length;
+
+ // `handledCPCount` is the number of code points that have been handled;
+ // `basicLength` is the number of basic code points.
+
+ // Finish the basic string - if it is not empty - with a delimiter
+ if (basicLength) {
+ output.push(delimiter);
+ }
+
+ // Main encoding loop:
+ while (handledCPCount < inputLength) {
+
+ // All non-basic code points < n have been handled already. Find the next
+ // larger one:
+ for (m = maxInt, j = 0; j < inputLength; ++j) {
+ currentValue = input[j];
+ if (currentValue >= n && currentValue < m) {
+ m = currentValue;
+ }
+ }
+
+ // Increase `delta` enough to advance the decoder's state to <n,0>,
+ // but guard against overflow
+ handledCPCountPlusOne = handledCPCount + 1;
+ if (m - n > floor((maxInt - delta) / handledCPCountPlusOne)) {
+ error('overflow');
+ }
+
+ delta += (m - n) * handledCPCountPlusOne;
+ n = m;
+
+ for (j = 0; j < inputLength; ++j) {
+ currentValue = input[j];
+
+ if (currentValue < n && ++delta > maxInt) {
+ error('overflow');
+ }
+
+ if (currentValue == n) {
+ // Represent delta as a generalized variable-length integer
+ for (q = delta, k = base; /* no condition */; k += base) {
+ t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias);
+ if (q < t) {
+ break;
+ }
+ qMinusT = q - t;
+ baseMinusT = base - t;
+ output.push(
+ stringFromCharCode(digitToBasic(t + qMinusT % baseMinusT, 0))
+ );
+ q = floor(qMinusT / baseMinusT);
+ }
+
+ output.push(stringFromCharCode(digitToBasic(q, 0)));
+ bias = adapt(delta, handledCPCountPlusOne, handledCPCount == basicLength);
+ delta = 0;
+ ++handledCPCount;
+ }
+ }
+
+ ++delta;
+ ++n;
+
+ }
+ return output.join('');
+ }
+
+ /**
+ * Converts a Punycode string representing a domain name or an email address
+ * to Unicode. Only the Punycoded parts of the input will be converted, i.e.
+ * it doesn't matter if you call it on a string that has already been
+ * converted to Unicode.
+ * @memberOf punycode
+ * @param {String} input The Punycoded domain name or email address to
+ * convert to Unicode.
+ * @returns {String} The Unicode representation of the given Punycode
+ * string.
+ */
+ function toUnicode(input) {
+ return mapDomain(input, function(string) {
+ return regexPunycode.test(string)
+ ? decode(string.slice(4).toLowerCase())
+ : string;
+ });
+ }
+
+ /**
+ * Converts a Unicode string representing a domain name or an email address to
+ * Punycode. Only the non-ASCII parts of the domain name will be converted,
+ * i.e. it doesn't matter if you call it with a domain that's already in
+ * ASCII.
+ * @memberOf punycode
+ * @param {String} input The domain name or email address to convert, as a
+ * Unicode string.
+ * @returns {String} The Punycode representation of the given domain name or
+ * email address.
+ */
+ function toASCII(input) {
+ return mapDomain(input, function(string) {
+ return regexNonASCII.test(string)
+ ? 'xn--' + encode(string)
+ : string;
+ });
+ }
+
+ /*--------------------------------------------------------------------------*/
+
+ /** Define the public API */
+ punycode = {
+ /**
+ * A string representing the current Punycode.js version number.
+ * @memberOf punycode
+ * @type String
+ */
+ 'version': '1.4.1',
+ /**
+ * An object of methods to convert from JavaScript's internal character
+ * representation (UCS-2) to Unicode code points, and back.
+ * @see <https://mathiasbynens.be/notes/javascript-encoding>
+ * @memberOf punycode
+ * @type Object
+ */
+ 'ucs2': {
+ 'decode': ucs2decode,
+ 'encode': ucs2encode
+ },
+ 'decode': decode,
+ 'encode': encode,
+ 'toASCII': toASCII,
+ 'toUnicode': toUnicode
+ };
+
+ /** Expose `punycode` */
+ // Some AMD build optimizers, like r.js, check for specific condition patterns
+ // like the following:
+ if (
+ typeof define == 'function' &&
+ typeof define.amd == 'object' &&
+ define.amd
+ ) {
+ define('punycode', function() {
+ return punycode;
+ });
+ } else if (freeExports && freeModule) {
+ if (module.exports == freeExports) {
+ // in Node.js, io.js, or RingoJS v0.8.0+
+ freeModule.exports = punycode;
+ } else {
+ // in Narwhal or RingoJS v0.7.0-
+ for (key in punycode) {
+ punycode.hasOwnProperty(key) && (freeExports[key] = punycode[key]);
+ }
+ }
+ } else {
+ // in Rhino or a web browser
+ root.punycode = punycode;
+ }
+
+}(this));
+
+}).call(this,typeof global !== "undefined" ? global : typeof self !== "undefined" ? self : typeof window !== "undefined" ? window : {})
+},{}]},{},[2])(2)
+});
diff --git a/familyark/app/node_modules/psl/dist/psl.min.js b/familyark/app/node_modules/psl/dist/psl.min.js
new file mode 100644
index 0000000..d5c787e
--- /dev/null
+++ b/familyark/app/node_modules/psl/dist/psl.min.js
@@ -0,0 +1 @@
+!function(a){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=a();else if("function"==typeof define&&define.amd)define([],a);else{("undefined"!=typeof window?window:"undefined"!=typeof global?global:"undefined"!=typeof self?self:this).psl=a()}}(function(){return function s(m,t,u){function r(o,a){if(!t[o]){if(!m[o]){var i="function"==typeof require&&require;if(!a&&i)return i(o,!0);if(p)return p(o,!0);var e=new Error("Cannot find module '"+o+"'");throw e.code="MODULE_NOT_FOUND",e}var n=t[o]={exports:{}};m[o][0].call(n.exports,function(a){return r(m[o][1][a]||a)},n,n.exports,s,m,t,u)}return t[o].exports}for(var p="function"==typeof require&&require,a=0;a= 0x80 (not a basic code point)","invalid-input":"Invalid input"},c=b-y,x=Math.floor,q=String.fromCharCode;function A(a){throw new RangeError(k[a])}function l(a,o){for(var i=a.length,e=[];i--;)e[i]=o(a[i]);return e}function g(a,o){var i=a.split("@"),e="";return 1>>10&1023|55296),a=56320|1023&a),o+=q(a)}).join("")}function L(a,o){return a+22+75*(a<26)-((0!=o)<<5)}function I(a,o,i){var e=0;for(a=i?x(a/t):a>>1,a+=x(a/o);c*f>>1x((d-g)/m))&&A("overflow"),g+=u*m,!(u<(r=t<=j?y:j+f<=t?f:t-j));t+=b)m>x(d/(p=b-r))&&A("overflow"),m*=p;j=I(g-s,o=c.length+1,0==s),x(g/o)>d-h&&A("overflow"),h+=x(g/o),g%=o,c.splice(g++,0,h)}return _(c)}function j(a){var o,i,e,n,s,m,t,u,r,p,k,c,l,g,h,j=[];for(c=(a=O(a)).length,o=w,s=v,m=i=0;mx((d-i)/(l=e+1))&&A("overflow"),i+=(t-o)*l,o=t,m=0;md&&A("overflow"),k==o){for(u=i,r=b;!(u<(p=r<=s?y:s+f<=r?f:r-s));r+=b)h=u-p,g=b-p,j.push(q(L(p+h%g,0))),u=x(h/g);j.push(q(L(u,0))),s=I(i,l,e==n),i=0,++e}++i,++o}return j.join("")}if(n={version:"1.4.1",ucs2:{decode:O,encode:_},decode:h,encode:j,toASCII:function(a){return g(a,function(a){return r.test(a)?"xn--"+j(a):a})},toUnicode:function(a){return g(a,function(a){return u.test(a)?h(a.slice(4).toLowerCase()):a})}},0,o&&i)if(T.exports==o)i.exports=n;else for(s in n)n.hasOwnProperty(s)&&(o[s]=n[s]);else a.punycode=n}(this)}).call(this,"undefined"!=typeof global?global:"undefined"!=typeof self?self:"undefined"!=typeof window?window:{})},{}]},{},[2])(2)});
diff --git a/familyark/app/node_modules/psl/index.js b/familyark/app/node_modules/psl/index.js
new file mode 100644
index 0000000..da7bc12
--- /dev/null
+++ b/familyark/app/node_modules/psl/index.js
@@ -0,0 +1,269 @@
+/*eslint no-var:0, prefer-arrow-callback: 0, object-shorthand: 0 */
+'use strict';
+
+
+var Punycode = require('punycode');
+
+
+var internals = {};
+
+
+//
+// Read rules from file.
+//
+internals.rules = require('./data/rules.json').map(function (rule) {
+
+ return {
+ rule: rule,
+ suffix: rule.replace(/^(\*\.|\!)/, ''),
+ punySuffix: -1,
+ wildcard: rule.charAt(0) === '*',
+ exception: rule.charAt(0) === '!'
+ };
+});
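+
+// Illustrative examples of the mapping above (added comment, not part of the
+// upstream psl source): a plain rule such as "com" becomes
+//   { rule: 'com', suffix: 'com', wildcard: false, exception: false },
+// a wildcard rule "*.ck" becomes
+//   { rule: '*.ck', suffix: 'ck', wildcard: true, exception: false },
+// and an exception rule "!www.ck" becomes
+//   { rule: '!www.ck', suffix: 'www.ck', wildcard: false, exception: true }.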
+
+
+//
+// Check if a given string ends with `suffix`.
+//
+internals.endsWith = function (str, suffix) {
+
+ return str.indexOf(suffix, str.length - suffix.length) !== -1;
+};
+
+
+//
+// Find rule for a given domain.
+//
+internals.findRule = function (domain) {
+
+ var punyDomain = Punycode.toASCII(domain);
+ return internals.rules.reduce(function (memo, rule) {
+
+ if (rule.punySuffix === -1){
+ rule.punySuffix = Punycode.toASCII(rule.suffix);
+ }
+ if (!internals.endsWith(punyDomain, '.' + rule.punySuffix) && punyDomain !== rule.punySuffix) {
+ return memo;
+ }
+ // This has been commented out as it never seems to run. This is because
+ // sub tlds always appear after their parents and we never find a shorter
+ // match.
+ //if (memo) {
+ // var memoSuffix = Punycode.toASCII(memo.suffix);
+ // if (memoSuffix.length >= punySuffix.length) {
+ // return memo;
+ // }
+ //}
+ return rule;
+ }, null);
+};
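+
+// Illustration (added comment, not upstream code): for a domain like
+// 'foo.co.uk' both the 'uk' and 'co.uk' rules match; since 'co.uk' appears
+// after 'uk' in the Public Suffix List data, the reduce above ends up
+// returning the longer 'co.uk' rule, which is why the commented-out length
+// comparison is not needed in practice.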
+
+
+//
+// Error codes and messages.
+//
+exports.errorCodes = {
+ DOMAIN_TOO_SHORT: 'Domain name too short.',
+ DOMAIN_TOO_LONG: 'Domain name too long. It should be no more than 255 chars.',
+ LABEL_STARTS_WITH_DASH: 'Domain name label can not start with a dash.',
+ LABEL_ENDS_WITH_DASH: 'Domain name label can not end with a dash.',
+ LABEL_TOO_LONG: 'Domain name label should be at most 63 chars long.',
+ LABEL_TOO_SHORT: 'Domain name label should be at least 1 character long.',
+ LABEL_INVALID_CHARS: 'Domain name label can only contain alphanumeric characters or dashes.'
+};
+
+
+//
+// Validate domain name and return an error code if not valid.
+//
+// From wikipedia:
+//
+// Hostnames are composed of a series of labels concatenated with dots, as are all
+// domain names. Each label must be between 1 and 63 characters long, and the
+// entire hostname (including the delimiting dots) has a maximum of 255 chars.
+//
+// Allowed chars:
+//
+// * `a-z`
+// * `0-9`
+// * `-` but not as a starting or ending character
+// * `.` as a separator for the textual portions of a domain name
+//
+// * http://en.wikipedia.org/wiki/Domain_name
+// * http://en.wikipedia.org/wiki/Hostname
+//
+internals.validate = function (input) {
+
+ // Before we can validate we need to take care of IDNs with unicode chars.
+ var ascii = Punycode.toASCII(input);
+
+ if (ascii.length < 1) {
+ return 'DOMAIN_TOO_SHORT';
+ }
+ if (ascii.length > 255) {
+ return 'DOMAIN_TOO_LONG';
+ }
+
+ // Check each part's length and allowed chars.
+ var labels = ascii.split('.');
+ var label;
+
+ for (var i = 0; i < labels.length; ++i) {
+ label = labels[i];
+ if (!label.length) {
+ return 'LABEL_TOO_SHORT';
+ }
+ if (label.length > 63) {
+ return 'LABEL_TOO_LONG';
+ }
+ if (label.charAt(0) === '-') {
+ return 'LABEL_STARTS_WITH_DASH';
+ }
+ if (label.charAt(label.length - 1) === '-') {
+ return 'LABEL_ENDS_WITH_DASH';
+ }
+ if (!/^[a-z0-9\-]+$/.test(label)) {
+ return 'LABEL_INVALID_CHARS';
+ }
+ }
+};
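+
+// Example return values (added comment, based on the checks above):
+//   internals.validate('example.com');   // undefined (valid)
+//   internals.validate('-example.com');  // 'LABEL_STARTS_WITH_DASH'
+//   internals.validate('ex_ample.com');  // 'LABEL_INVALID_CHARS'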
+
+
+//
+// Public API
+//
+
+
+//
+// Parse domain.
+//
+exports.parse = function (input) {
+
+ if (typeof input !== 'string') {
+ throw new TypeError('Domain name must be a string.');
+ }
+
+ // Force domain to lowercase.
+ var domain = input.slice(0).toLowerCase();
+
+ // Handle FQDN.
+ // TODO: Simply remove trailing dot?
+ if (domain.charAt(domain.length - 1) === '.') {
+ domain = domain.slice(0, domain.length - 1);
+ }
+
+ // Validate and sanitise input.
+ var error = internals.validate(domain);
+ if (error) {
+ return {
+ input: input,
+ error: {
+ message: exports.errorCodes[error],
+ code: error
+ }
+ };
+ }
+
+ var parsed = {
+ input: input,
+ tld: null,
+ sld: null,
+ domain: null,
+ subdomain: null,
+ listed: false
+ };
+
+ var domainParts = domain.split('.');
+
+ // Non-Internet TLD
+ if (domainParts[domainParts.length - 1] === 'local') {
+ return parsed;
+ }
+
+ var handlePunycode = function () {
+
+ if (!/xn--/.test(domain)) {
+ return parsed;
+ }
+ if (parsed.domain) {
+ parsed.domain = Punycode.toASCII(parsed.domain);
+ }
+ if (parsed.subdomain) {
+ parsed.subdomain = Punycode.toASCII(parsed.subdomain);
+ }
+ return parsed;
+ };
+
+ var rule = internals.findRule(domain);
+
+ // Unlisted tld.
+ if (!rule) {
+ if (domainParts.length < 2) {
+ return parsed;
+ }
+ parsed.tld = domainParts.pop();
+ parsed.sld = domainParts.pop();
+ parsed.domain = [parsed.sld, parsed.tld].join('.');
+ if (domainParts.length) {
+ parsed.subdomain = domainParts.pop();
+ }
+ return handlePunycode();
+ }
+
+ // At this point we know the public suffix is listed.
+ parsed.listed = true;
+
+ var tldParts = rule.suffix.split('.');
+ var privateParts = domainParts.slice(0, domainParts.length - tldParts.length);
+
+ if (rule.exception) {
+ privateParts.push(tldParts.shift());
+ }
+
+ parsed.tld = tldParts.join('.');
+
+ if (!privateParts.length) {
+ return handlePunycode();
+ }
+
+ if (rule.wildcard) {
+ tldParts.unshift(privateParts.pop());
+ parsed.tld = tldParts.join('.');
+ }
+
+ if (!privateParts.length) {
+ return handlePunycode();
+ }
+
+ parsed.sld = privateParts.pop();
+ parsed.domain = [parsed.sld, parsed.tld].join('.');
+
+ if (privateParts.length) {
+ parsed.subdomain = privateParts.join('.');
+ }
+
+ return handlePunycode();
+};
+
+
+//
+// Get domain.
+//
+exports.get = function (domain) {
+
+ if (!domain) {
+ return null;
+ }
+ return exports.parse(domain).domain || null;
+};
+
+
+//
+// Check whether domain belongs to a known public suffix.
+//
+exports.isValid = function (domain) {
+
+ var parsed = exports.parse(domain);
+ return Boolean(parsed.domain && parsed.listed);
+};
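+
+// Usage sketch for the public API above (added comment, not upstream code):
+//
+//   var psl = require('psl');
+//   psl.get('www.google.co.uk');            // 'google.co.uk'
+//   psl.isValid('google.co.uk');            // true
+//   var parsed = psl.parse('a.b.example.co.uk');
+//   // parsed.tld === 'co.uk', parsed.sld === 'example',
+//   // parsed.domain === 'example.co.uk', parsed.subdomain === 'a.b'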
diff --git a/familyark/app/node_modules/psl/package.json b/familyark/app/node_modules/psl/package.json
new file mode 100644
index 0000000..2984fac
--- /dev/null
+++ b/familyark/app/node_modules/psl/package.json
@@ -0,0 +1,77 @@
+{
+ "_from": "psl@^1.1.28",
+ "_id": "psl@1.8.0",
+ "_inBundle": false,
+ "_integrity": "sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==",
+ "_location": "/psl",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "psl@^1.1.28",
+ "name": "psl",
+ "escapedName": "psl",
+ "rawSpec": "^1.1.28",
+ "saveSpec": null,
+ "fetchSpec": "^1.1.28"
+ },
+ "_requiredBy": [
+ "/tough-cookie"
+ ],
+ "_resolved": "https://registry.npmjs.org/psl/-/psl-1.8.0.tgz",
+ "_shasum": "9326f8bcfb013adcc005fdff056acce020e51c24",
+ "_spec": "psl@^1.1.28",
+ "_where": "/home/g/Workspace/hatthieves/familyARK/familyark/app/node_modules/tough-cookie",
+ "author": {
+ "name": "Lupo Montero",
+ "email": "lupomontero@gmail.com",
+ "url": "https://lupomontero.com/"
+ },
+ "bugs": {
+ "url": "https://github.com/lupomontero/psl/issues"
+ },
+ "bundleDependencies": false,
+ "deprecated": false,
+ "description": "Domain name parser based on the Public Suffix List",
+ "devDependencies": {
+ "JSONStream": "^1.3.5",
+ "browserify": "^16.5.0",
+ "commit-and-pr": "^1.0.4",
+ "eslint": "^6.8.0",
+ "eslint-config-hapi": "^12.0.0",
+ "eslint-plugin-hapi": "^4.1.0",
+ "karma": "^4.4.1",
+ "karma-browserify": "^7.0.0",
+ "karma-mocha": "^1.3.0",
+ "karma-mocha-reporter": "^2.2.5",
+ "karma-phantomjs-launcher": "^1.0.4",
+ "mocha": "^7.1.1",
+ "phantomjs-prebuilt": "^2.1.16",
+ "request": "^2.88.2",
+ "uglify-js": "^3.8.0",
+ "watchify": "^3.11.1"
+ },
+ "homepage": "https://github.com/lupomontero/psl#readme",
+ "keywords": [
+ "publicsuffix",
+ "publicsuffixlist"
+ ],
+ "license": "MIT",
+ "main": "index.js",
+ "name": "psl",
+ "repository": {
+ "type": "git",
+ "url": "git+ssh://git@github.com/lupomontero/psl.git"
+ },
+ "scripts": {
+ "build": "browserify ./index.js --standalone=psl > ./dist/psl.js",
+ "changelog": "git log $(git describe --tags --abbrev=0)..HEAD --oneline --format=\"%h %s (%an <%ae>)\"",
+ "commit-and-pr": "commit-and-pr",
+ "postbuild": "cat ./dist/psl.js | uglifyjs -c -m > ./dist/psl.min.js",
+ "prebuild": "./scripts/update-rules.js",
+ "pretest": "eslint .",
+ "test": "mocha test && karma start ./karma.conf.js --single-run",
+ "watch": "mocha test --watch"
+ },
+ "version": "1.8.0"
+}
diff --git a/familyark/app/node_modules/punycode/LICENSE-MIT.txt b/familyark/app/node_modules/punycode/LICENSE-MIT.txt
new file mode 100644
index 0000000..a41e0a7
--- /dev/null
+++ b/familyark/app/node_modules/punycode/LICENSE-MIT.txt
@@ -0,0 +1,20 @@
+Copyright Mathias Bynens
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/familyark/app/node_modules/punycode/README.md b/familyark/app/node_modules/punycode/README.md
new file mode 100644
index 0000000..ee2f9d6
--- /dev/null
+++ b/familyark/app/node_modules/punycode/README.md
@@ -0,0 +1,122 @@
+# Punycode.js [](https://travis-ci.org/bestiejs/punycode.js) [](https://codecov.io/gh/bestiejs/punycode.js) [](https://gemnasium.com/bestiejs/punycode.js)
+
+Punycode.js is a robust Punycode converter that fully complies to [RFC 3492](https://tools.ietf.org/html/rfc3492) and [RFC 5891](https://tools.ietf.org/html/rfc5891).
+
+This JavaScript library is the result of comparing, optimizing and documenting different open-source implementations of the Punycode algorithm:
+
+* [The C example code from RFC 3492](https://tools.ietf.org/html/rfc3492#appendix-C)
+* [`punycode.c` by _Markus W. Scherer_ (IBM)](http://opensource.apple.com/source/ICU/ICU-400.42/icuSources/common/punycode.c)
+* [`punycode.c` by _Ben Noordhuis_](https://github.com/bnoordhuis/punycode/blob/master/punycode.c)
+* [JavaScript implementation by _some_](http://stackoverflow.com/questions/183485/can-anyone-recommend-a-good-free-javascript-for-punycode-to-unicode-conversion/301287#301287)
+* [`punycode.js` by _Ben Noordhuis_](https://github.com/joyent/node/blob/426298c8c1c0d5b5224ac3658c41e7c2a3fe9377/lib/punycode.js) (note: [not fully compliant](https://github.com/joyent/node/issues/2072))
+
+This project was [bundled](https://github.com/joyent/node/blob/master/lib/punycode.js) with Node.js from [v0.6.2+](https://github.com/joyent/node/compare/975f1930b1...61e796decc) until [v7](https://github.com/nodejs/node/pull/7941) (soft-deprecated).
+
+The current version supports recent versions of Node.js only. It provides a CommonJS module and an ES6 module. For the old version that offers the same functionality with broader support, including Rhino, Ringo, Narwhal, and web browsers, see [v1.4.1](https://github.com/bestiejs/punycode.js/releases/tag/v1.4.1).
+
+## Installation
+
+Via [npm](https://www.npmjs.com/):
+
+```bash
+npm install punycode --save
+```
+
+In [Node.js](https://nodejs.org/):
+
+```js
+const punycode = require('punycode');
+```
+
+## API
+
+### `punycode.decode(string)`
+
+Converts a Punycode string of ASCII symbols to a string of Unicode symbols.
+
+```js
+// decode domain name parts
+punycode.decode('maana-pta'); // 'mañana'
+punycode.decode('--dqo34k'); // '☃-⌘'
+```
+
+### `punycode.encode(string)`
+
+Converts a string of Unicode symbols to a Punycode string of ASCII symbols.
+
+```js
+// encode domain name parts
+punycode.encode('mañana'); // 'maana-pta'
+punycode.encode('☃-⌘'); // '--dqo34k'
+```
+
+### `punycode.toUnicode(input)`
+
+Converts a Punycode string representing a domain name or an email address to Unicode. Only the Punycoded parts of the input will be converted, i.e. it doesn’t matter if you call it on a string that has already been converted to Unicode.
+
+```js
+// decode domain names
+punycode.toUnicode('xn--maana-pta.com');
+// → 'mañana.com'
+punycode.toUnicode('xn----dqo34k.com');
+// → '☃-⌘.com'
+
+// decode email addresses
+punycode.toUnicode('джумла@xn--p-8sbkgc5ag7bhce.xn--ba-lmcq');
+// → 'джумла@джpумлатест.bрфa'
+```
+
+### `punycode.toASCII(input)`
+
+Converts a lowercased Unicode string representing a domain name or an email address to Punycode. Only the non-ASCII parts of the input will be converted, i.e. it doesn’t matter if you call it with a domain that’s already in ASCII.
+
+```js
+// encode domain names
+punycode.toASCII('mañana.com');
+// → 'xn--maana-pta.com'
+punycode.toASCII('☃-⌘.com');
+// → 'xn----dqo34k.com'
+
+// encode email addresses
+punycode.toASCII('джумла@джpумлатест.bрфa');
+// → 'джумла@xn--p-8sbkgc5ag7bhce.xn--ba-lmcq'
+```
+
+### `punycode.ucs2`
+
+#### `punycode.ucs2.decode(string)`
+
+Creates an array containing the numeric code point values of each Unicode symbol in the string. While [JavaScript uses UCS-2 internally](https://mathiasbynens.be/notes/javascript-encoding), this function will convert a pair of surrogate halves (each of which UCS-2 exposes as separate characters) into a single code point, matching UTF-16.
+
+```js
+punycode.ucs2.decode('abc');
+// → [0x61, 0x62, 0x63]
+// surrogate pair for U+1D306 TETRAGRAM FOR CENTRE:
+punycode.ucs2.decode('\uD834\uDF06');
+// → [0x1D306]
+```
+
+#### `punycode.ucs2.encode(codePoints)`
+
+Creates a string based on an array of numeric code point values.
+
+```js
+punycode.ucs2.encode([0x61, 0x62, 0x63]);
+// → 'abc'
+punycode.ucs2.encode([0x1D306]);
+// → '\uD834\uDF06'
+```
+
+### `punycode.version`
+
+A string representing the current Punycode.js version number.
+
+## Author
+
+| [](https://twitter.com/mathias "Follow @mathias on Twitter") |
+|---|
+| [Mathias Bynens](https://mathiasbynens.be/) |
+
+## License
+
+Punycode.js is available under the [MIT](https://mths.be/mit) license.
diff --git a/familyark/app/node_modules/punycode/package.json b/familyark/app/node_modules/punycode/package.json
new file mode 100644
index 0000000..af6d7f5
--- /dev/null
+++ b/familyark/app/node_modules/punycode/package.json
@@ -0,0 +1,86 @@
+{
+ "_from": "punycode@^2.1.0",
+ "_id": "punycode@2.1.1",
+ "_inBundle": false,
+ "_integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==",
+ "_location": "/punycode",
+ "_phantomChildren": {},
+ "_requested": {
+ "type": "range",
+ "registry": true,
+ "raw": "punycode@^2.1.0",
+ "name": "punycode",
+ "escapedName": "punycode",
+ "rawSpec": "^2.1.0",
+ "saveSpec": null,
+ "fetchSpec": "^2.1.0"
+ },
+ "_requiredBy": [
+ "/tough-cookie",
+ "/uri-js"
+ ],
+ "_resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz",
+ "_shasum": "b58b010ac40c22c5657616c8d2c2c02c7bf479ec",
+ "_spec": "punycode@^2.1.0",
+ "_where": "/home/g/Workspace/hatthieves/familyARK/familyark/app/node_modules/uri-js",
+ "author": {
+ "name": "Mathias Bynens",
+ "url": "https://mathiasbynens.be/"
+ },
+ "bugs": {
+ "url": "https://github.com/bestiejs/punycode.js/issues"
+ },
+ "bundleDependencies": false,
+ "contributors": [
+ {
+ "name": "Mathias Bynens",
+ "url": "https://mathiasbynens.be/"
+ }
+ ],
+ "deprecated": false,
+ "description": "A robust Punycode converter that fully complies to RFC 3492 and RFC 5891, and works on nearly all JavaScript platforms.",
+ "devDependencies": {
+ "codecov": "^1.0.1",
+ "istanbul": "^0.4.1",
+ "mocha": "^2.5.3"
+ },
+ "engines": {
+ "node": ">=6"
+ },
+ "files": [
+ "LICENSE-MIT.txt",
+ "punycode.js",
+ "punycode.es6.js"
+ ],
+ "homepage": "https://mths.be/punycode",
+ "jsnext:main": "punycode.es6.js",
+ "jspm": {
+ "map": {
+ "./punycode.js": {
+ "node": "@node/punycode"
+ }
+ }
+ },
+ "keywords": [
+ "punycode",
+ "unicode",
+ "idn",
+ "idna",
+ "dns",
+ "url",
+ "domain"
+ ],
+ "license": "MIT",
+ "main": "punycode.js",
+ "module": "punycode.es6.js",
+ "name": "punycode",
+ "repository": {
+ "type": "git",
+ "url": "git+https://github.com/bestiejs/punycode.js.git"
+ },
+ "scripts": {
+ "prepublish": "node scripts/prepublish.js",
+ "test": "mocha tests"
+ },
+ "version": "2.1.1"
+}
diff --git a/familyark/app/node_modules/punycode/punycode.es6.js b/familyark/app/node_modules/punycode/punycode.es6.js
new file mode 100644
index 0000000..4610bc9
--- /dev/null
+++ b/familyark/app/node_modules/punycode/punycode.es6.js
@@ -0,0 +1,441 @@
+'use strict';
+
+/** Highest positive signed 32-bit float value */
+const maxInt = 2147483647; // aka. 0x7FFFFFFF or 2^31-1
+
+/** Bootstring parameters */
+const base = 36;
+const tMin = 1;
+const tMax = 26;
+const skew = 38;
+const damp = 700;
+const initialBias = 72;
+const initialN = 128; // 0x80
+const delimiter = '-'; // '\x2D'
+
+/** Regular expressions */
+const regexPunycode = /^xn--/;
+const regexNonASCII = /[^\0-\x7E]/; // non-ASCII chars
+const regexSeparators = /[\x2E\u3002\uFF0E\uFF61]/g; // RFC 3490 separators
+
+/** Error messages */
+const errors = {
+ 'overflow': 'Overflow: input needs wider integers to process',
+ 'not-basic': 'Illegal input >= 0x80 (not a basic code point)',
+ 'invalid-input': 'Invalid input'
+};
+
+/** Convenience shortcuts */
+const baseMinusTMin = base - tMin;
+const floor = Math.floor;
+const stringFromCharCode = String.fromCharCode;
+
+/*--------------------------------------------------------------------------*/
+
+/**
+ * A generic error utility function.
+ * @private
+ * @param {String} type The error type.
+ * @returns {Error} Throws a `RangeError` with the applicable error message.
+ */
+function error(type) {
+ throw new RangeError(errors[type]);
+}
+
+/**
+ * A generic `Array#map` utility function.
+ * @private
+ * @param {Array} array The array to iterate over.
+ * @param {Function} callback The function that gets called for every array
+ * item.
+ * @returns {Array} A new array of values returned by the callback function.
+ */
+function map(array, fn) {
+ const result = [];
+ let length = array.length;
+ while (length--) {
+ result[length] = fn(array[length]);
+ }
+ return result;
+}
+
+/**
+ * A simple `Array#map`-like wrapper to work with domain name strings or email
+ * addresses.
+ * @private
+ * @param {String} domain The domain name or email address.
+ * @param {Function} callback The function that gets called for every
+ * character.
+ * @returns {Array} A new string of characters returned by the callback
+ * function.
+ */
+function mapDomain(string, fn) {
+ const parts = string.split('@');
+ let result = '';
+ if (parts.length > 1) {
+ // In email addresses, only the domain name should be punycoded. Leave
+ // the local part (i.e. everything up to `@`) intact.
+ result = parts[0] + '@';
+ string = parts[1];
+ }
+ // Avoid `split(regex)` for IE8 compatibility. See #17.
+ string = string.replace(regexSeparators, '\x2E');
+ const labels = string.split('.');
+ const encoded = map(labels, fn).join('.');
+ return result + encoded;
+}
+
+/**
+ * Creates an array containing the numeric code points of each Unicode
+ * character in the string. While JavaScript uses UCS-2 internally,
+ * this function will convert a pair of surrogate halves (each of which
+ * UCS-2 exposes as separate characters) into a single code point,
+ * matching UTF-16.
+ * @see `punycode.ucs2.encode`
+ * @see <https://mathiasbynens.be/notes/javascript-encoding>
+ * @memberOf punycode.ucs2
+ * @name decode
+ * @param {String} string The Unicode input string (UCS-2).
+ * @returns {Array} The new array of code points.
+ */
+function ucs2decode(string) {
+ const output = [];
+ let counter = 0;
+ const length = string.length;
+ while (counter < length) {
+ const value = string.charCodeAt(counter++);
+ if (value >= 0xD800 && value <= 0xDBFF && counter < length) {
+ // It's a high surrogate, and there is a next character.
+ const extra = string.charCodeAt(counter++);
+ if ((extra & 0xFC00) == 0xDC00) { // Low surrogate.
+ output.push(((value & 0x3FF) << 10) + (extra & 0x3FF) + 0x10000);
+ } else {
+ // It's an unmatched surrogate; only append this code unit, in case the
+ // next code unit is the high surrogate of a surrogate pair.
+ output.push(value);
+ counter--;
+ }
+ } else {
+ output.push(value);
+ }
+ }
+ return output;
+}
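+
+// For example (added comment): ucs2decode('abc') yields [0x61, 0x62, 0x63],
+// and the surrogate pair '\uD834\uDF06' (U+1D306) yields the single code
+// point [0x1D306].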
+
+/**
+ * Creates a string based on an array of numeric code points.
+ * @see `punycode.ucs2.decode`
+ * @memberOf punycode.ucs2
+ * @name encode
+ * @param {Array} codePoints The array of numeric code points.
+ * @returns {String} The new Unicode string (UCS-2).
+ */
+const ucs2encode = array => String.fromCodePoint(...array);
+
+/**
+ * Converts a basic code point into a digit/integer.
+ * @see `digitToBasic()`
+ * @private
+ * @param {Number} codePoint The basic numeric code point value.
+ * @returns {Number} The numeric value of a basic code point (for use in
+ * representing integers) in the range `0` to `base - 1`, or `base` if
+ * the code point does not represent a value.
+ */
+const basicToDigit = function(codePoint) {
+ if (codePoint - 0x30 < 0x0A) {
+ return codePoint - 0x16;
+ }
+ if (codePoint - 0x41 < 0x1A) {
+ return codePoint - 0x41;
+ }
+ if (codePoint - 0x61 < 0x1A) {
+ return codePoint - 0x61;
+ }
+ return base;
+};
+
+/**
+ * Converts a digit/integer into a basic code point.
+ * @see `basicToDigit()`
+ * @private
+ * @param {Number} digit The numeric value of a basic code point.
+ * @returns {Number} The basic code point whose value (when used for
+ * representing integers) is `digit`, which needs to be in the range
+ * `0` to `base - 1`. If `flag` is non-zero, the uppercase form is
+ * used; else, the lowercase form is used. The behavior is undefined
+ * if `flag` is non-zero and `digit` has no uppercase form.
+ */
+const digitToBasic = function(digit, flag) {
+ // 0..25 map to ASCII a..z or A..Z
+ // 26..35 map to ASCII 0..9
+ return digit + 22 + 75 * (digit < 26) - ((flag != 0) << 5);
+};
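+
+// Worked values (added comment): digitToBasic(0, 0) === 0x61 ('a'),
+// digitToBasic(25, 0) === 0x7A ('z'), digitToBasic(26, 0) === 0x30 ('0'),
+// and basicToDigit() inverts this: basicToDigit(0x61) === 0 and
+// basicToDigit(0x30) === 26.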
+
+/**
+ * Bias adaptation function as per section 3.4 of RFC 3492.
+ * https://tools.ietf.org/html/rfc3492#section-3.4
+ * @private
+ */
+const adapt = function(delta, numPoints, firstTime) {
+ let k = 0;
+ delta = firstTime ? floor(delta / damp) : delta >> 1;
+ delta += floor(delta / numPoints);
+ for (/* no initialization */; delta > baseMinusTMin * tMax >> 1; k += base) {
+ delta = floor(delta / baseMinusTMin);
+ }
+ return floor(k + (baseMinusTMin + 1) * delta / (delta + skew));
+};
+
+/**
+ * Converts a Punycode string of ASCII-only symbols to a string of Unicode
+ * symbols.
+ * @memberOf punycode
+ * @param {String} input The Punycode string of ASCII-only symbols.
+ * @returns {String} The resulting string of Unicode symbols.
+ */
+const decode = function(input) {
+ // Don't use UCS-2.
+ const output = [];
+ const inputLength = input.length;
+ let i = 0;
+ let n = initialN;
+ let bias = initialBias;
+
+ // Handle the basic code points: let `basic` be the number of input code
+ // points before the last delimiter, or `0` if there is none, then copy
+ // the first basic code points to the output.
+
+ let basic = input.lastIndexOf(delimiter);
+ if (basic < 0) {
+ basic = 0;
+ }
+
+ for (let j = 0; j < basic; ++j) {
+ // if it's not a basic code point
+ if (input.charCodeAt(j) >= 0x80) {
+ error('not-basic');
+ }
+ output.push(input.charCodeAt(j));
+ }
+
+ // Main decoding loop: start just after the last delimiter if any basic code
+ // points were copied; start at the beginning otherwise.
+
+ for (let index = basic > 0 ? basic + 1 : 0; index < inputLength; /* no final expression */) {
+
+ // `index` is the index of the next character to be consumed.
+ // Decode a generalized variable-length integer into `delta`,
+ // which gets added to `i`. The overflow checking is easier
+ // if we increase `i` as we go, then subtract off its starting
+ // value at the end to obtain `delta`.
+ let oldi = i;
+ for (let w = 1, k = base; /* no condition */; k += base) {
+
+ if (index >= inputLength) {
+ error('invalid-input');
+ }
+
+ const digit = basicToDigit(input.charCodeAt(index++));
+
+ if (digit >= base || digit > floor((maxInt - i) / w)) {
+ error('overflow');
+ }
+
+ i += digit * w;
+ const t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias);
+
+ if (digit < t) {
+ break;
+ }
+
+ const baseMinusT = base - t;
+ if (w > floor(maxInt / baseMinusT)) {
+ error('overflow');
+ }
+
+ w *= baseMinusT;
+
+ }
+
+ const out = output.length + 1;
+ bias = adapt(i - oldi, out, oldi == 0);
+
+ // `i` was supposed to wrap around from `out` to `0`,
+ // incrementing `n` each time, so we'll fix that now:
+ if (floor(i / out) > maxInt - n) {
+ error('overflow');
+ }
+
+ n += floor(i / out);
+ i %= out;
+
+ // Insert `n` at position `i` of the output.
+ output.splice(i++, 0, n);
+
+ }
+
+ return String.fromCodePoint(...output);
+};
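+
+// Illustrative example (not part of the original source): decoding the
+// ASCII label produced for "mañana" recovers the original string, i.e.
+// decode('maana-pta') === 'mañana'. The basic code points before the last
+// '-' ("maana") are copied verbatim, and the trailing "pta" encodes where
+// the non-basic 'ñ' gets inserted.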
+
+/**
+ * Converts a string of Unicode symbols (e.g. a domain name label) to a
+ * Punycode string of ASCII-only symbols.
+ * @memberOf punycode
+ * @param {String} input The string of Unicode symbols.
+ * @returns {String} The resulting Punycode string of ASCII-only symbols.
+ */
+const encode = function(input) {
+ const output = [];
+
+ // Convert the input in UCS-2 to an array of Unicode code points.
+ input = ucs2decode(input);
+
+ // Cache the length.
+ let inputLength = input.length;
+
+ // Initialize the state.
+ let n = initialN;
+ let delta = 0;
+ let bias = initialBias;
+
+ // Handle the basic code points.
+ for (const currentValue of input) {
+ if (currentValue < 0x80) {
+ output.push(stringFromCharCode(currentValue));
+ }
+ }
+
+ let basicLength = output.length;
+ let handledCPCount = basicLength;
+
+ // `handledCPCount` is the number of code points that have been handled;
+ // `basicLength` is the number of basic code points.
+
+ // Finish the basic string with a delimiter unless it's empty.
+ if (basicLength) {
+ output.push(delimiter);
+ }
+
+ // Main encoding loop:
+ while (handledCPCount < inputLength) {
+
+ // All non-basic code points < n have been handled already. Find the next
+ // larger one:
+ let m = maxInt;
+ for (const currentValue of input) {
+ if (currentValue >= n && currentValue < m) {
+ m = currentValue;
+ }
+ }
+
+ // Increase `delta` enough to advance the decoder's <n,i> state to <m,0>,
+ // but guard against overflow.
+ const handledCPCountPlusOne = handledCPCount + 1;
+ if (m - n > floor((maxInt - delta) / handledCPCountPlusOne)) {
+ error('overflow');
+ }
+
+ delta += (m - n) * handledCPCountPlusOne;
+ n = m;
+
+ for (const currentValue of input) {
+ if (currentValue < n && ++delta > maxInt) {
+ error('overflow');
+ }
+ if (currentValue == n) {
+ // Represent delta as a generalized variable-length integer.
+ let q = delta;
+ for (let k = base; /* no condition */; k += base) {
+ const t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias);
+ if (q < t) {
+ break;
+ }
+ const qMinusT = q - t;
+ const baseMinusT = base - t;
+ output.push(
+ stringFromCharCode(digitToBasic(t + qMinusT % baseMinusT, 0))
+ );
+ q = floor(qMinusT / baseMinusT);
+ }
+
+ output.push(stringFromCharCode(digitToBasic(q, 0)));
+ bias = adapt(delta, handledCPCountPlusOne, handledCPCount == basicLength);
+ delta = 0;
+ ++handledCPCount;
+ }
+ }
+
+ ++delta;
+ ++n;
+
+ }
+ return output.join('');
+};
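+
+// Illustrative examples (not part of the original source):
+// encode('mañana') === 'maana-pta' and encode('ü') === 'tda' (the label
+// behind 'xn--tda'). Purely ASCII input still gets the trailing delimiter,
+// e.g. encode('abc') === 'abc-'.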
+
+/**
+ * Converts a Punycode string representing a domain name or an email address
+ * to Unicode. Only the Punycoded parts of the input will be converted, i.e.
+ * it doesn't matter if you call it on a string that has already been
+ * converted to Unicode.
+ * @memberOf punycode
+ * @param {String} input The Punycoded domain name or email address to
+ * convert to Unicode.
+ * @returns {String} The Unicode representation of the given Punycode
+ * string.
+ */
+const toUnicode = function(input) {
+ return mapDomain(input, function(string) {
+ return regexPunycode.test(string)
+ ? decode(string.slice(4).toLowerCase())
+ : string;
+ });
+};
+
+/**
+ * Converts a Unicode string representing a domain name or an email address to
+ * Punycode. Only the non-ASCII parts of the domain name will be converted,
+ * i.e. it doesn't matter if you call it with a domain that's already in
+ * ASCII.
+ * @memberOf punycode
+ * @param {String} input The domain name or email address to convert, as a
+ * Unicode string.
+ * @returns {String} The Punycode representation of the given domain name or
+ * email address.
+ */
+const toASCII = function(input) {
+ return mapDomain(input, function(string) {
+ return regexNonASCII.test(string)
+ ? 'xn--' + encode(string)
+ : string;
+ });
+};
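+
+// Illustrative examples (not part of the original source):
+// toASCII('mañana.com') === 'xn--maana-pta.com' and
+// toUnicode('xn--maana-pta.com') === 'mañana.com'. Labels that are already
+// ASCII (or, for toUnicode, not Punycoded) pass through unchanged, so both
+// functions are safe to call on mixed input.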
+
+/*--------------------------------------------------------------------------*/
+
+/** Define the public API */
+const punycode = {
+ /**
+ * A string representing the current Punycode.js version number.
+ * @memberOf punycode
+ * @type String
+ */
+ 'version': '2.1.0',
+ /**
+ * An object of methods to convert from JavaScript's internal character
+ * representation (UCS-2) to Unicode code points, and back.
+ * @see <https://mathiasbynens.be/notes/javascript-encoding>
+ * @memberOf punycode
+ * @type Object
+ */
+ 'ucs2': {
+ 'decode': ucs2decode,
+ 'encode': ucs2encode
+ },
+ 'decode': decode,
+ 'encode': encode,
+ 'toASCII': toASCII,
+ 'toUnicode': toUnicode
+};
+
+export { ucs2decode, ucs2encode, decode, encode, toASCII, toUnicode };
+export default punycode;
diff --git a/familyark/app/node_modules/punycode/punycode.js b/familyark/app/node_modules/punycode/punycode.js
new file mode 100644
index 0000000..ea61fd0
--- /dev/null
+++ b/familyark/app/node_modules/punycode/punycode.js
@@ -0,0 +1,440 @@
+'use strict';
+
+/** Highest positive signed 32-bit integer value */
+const maxInt = 2147483647; // aka. 0x7FFFFFFF or 2^31-1
+
+/** Bootstring parameters */
+const base = 36;
+const tMin = 1;
+const tMax = 26;
+const skew = 38;
+const damp = 700;
+const initialBias = 72;
+const initialN = 128; // 0x80
+const delimiter = '-'; // '\x2D'
+
+/** Regular expressions */
+const regexPunycode = /^xn--/;
+const regexNonASCII = /[^\0-\x7E]/; // non-ASCII chars
+const regexSeparators = /[\x2E\u3002\uFF0E\uFF61]/g; // RFC 3490 separators
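+
+// Illustrative note (not part of the original source): regexSeparators
+// matches the IDNA label separators U+002E, U+3002, U+FF0E and U+FF61;
+// mapDomain() below normalizes all of them to '.' before splitting a domain
+// into labels.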
+
+/** Error messages */
+const errors = {
+ 'overflow': 'Overflow: input needs wider integers to process',
+ 'not-basic': 'Illegal input >= 0x80 (not a basic code point)',
+ 'invalid-input': 'Invalid input'
+};
+
+/** Convenience shortcuts */
+const baseMinusTMin = base - tMin;
+const floor = Math.floor;
+const stringFromCharCode = String.fromCharCode;
+
+/*--------------------------------------------------------------------------*/
+
+/**
+ * A generic error utility function.
+ * @private
+ * @param {String} type The error type.
+ * @returns {Error} Throws a `RangeError` with the applicable error message.
+ */
+function error(type) {
+ throw new RangeError(errors[type]);
+}
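+
+// Illustrative note (not part of the original source): every failure mode in
+// this file goes through error(), which always throws a RangeError carrying
+// one of the messages defined in `errors` above, e.g. error('overflow')
+// throws RangeError('Overflow: input needs wider integers to process').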
+
+/**
+ * A generic `Array#map` utility function.
+ * @private
+ * @param {Array} array The array to iterate over.
+ * @param {Function} callback The function that gets called for every array
+ * item.
+ * @returns {Array} A new array of values returned by the callback function.
+ */
+function map(array, fn) {
+ const result = [];
+ let length = array.length;
+ while (length--) {
+ result[length] = fn(array[length]);
+ }
+ return result;
+}
+
+/**
+ * A simple `Array#map`-like wrapper to work with domain name strings or email
+ * addresses.
+ * @private
+ * @param {String} domain The domain name or email address.
+ * @param {Function} callback The function that gets called for every
+ * character.
+ * @returns {Array} A new string of characters returned by the callback
+ * function.
+ */
+function mapDomain(string, fn) {
+ const parts = string.split('@');
+ let result = '';
+ if (parts.length > 1) {
+ // In email addresses, only the domain name should be punycoded. Leave
+ // the local part (i.e. everything up to `@`) intact.
+ result = parts[0] + '@';
+ string = parts[1];
+ }
+ // Avoid `split(regex)` for IE8 compatibility. See #17.
+ string = string.replace(regexSeparators, '\x2E');
+ const labels = string.split('.');
+ const encoded = map(labels, fn).join('.');
+ return result + encoded;
+}
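+
+// Illustrative note (not part of the original source): for an email-style
+// input only the domain part is mapped, so mapDomain('user@exämple.com', fn)
+// calls fn('exämple') and fn('com') and re-attaches the untouched 'user@'
+// prefix to the joined result.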
+
+/**
+ * Creates an array containing the numeric code points of each Unicode
+ * character in the string. While JavaScript uses UCS-2 internally,
+ * this function will convert a pair of surrogate halves (each of which
+ * UCS-2 exposes as separate characters) into a single code point,
+ * matching UTF-16.
+ * @see `punycode.ucs2.encode`
+ * @see <https://mathiasbynens.be/notes/javascript-encoding>
+ * @memberOf punycode.ucs2
+ * @name decode
+ * @param {String} string The Unicode input string (UCS-2).
+ * @returns {Array} The new array of code points.
+ */
+function ucs2decode(string) {
+ const output = [];
+ let counter = 0;
+ const length = string.length;
+ while (counter < length) {
+ const value = string.charCodeAt(counter++);
+ if (value >= 0xD800 && value <= 0xDBFF && counter < length) {
+ // It's a high surrogate, and there is a next character.
+ const extra = string.charCodeAt(counter++);
+ if ((extra & 0xFC00) == 0xDC00) { // Low surrogate.
+ output.push(((value & 0x3FF) << 10) + (extra & 0x3FF) + 0x10000);
+ } else {
+ // It's an unmatched surrogate; only append this code unit, in case the
+ // next code unit is the high surrogate of a surrogate pair.
+ output.push(value);
+ counter--;
+ }
+ } else {
+ output.push(value);
+ }
+ }
+ return output;
+}
+
+/**
+ * Creates a string based on an array of numeric code points.
+ * @see `punycode.ucs2.decode`
+ * @memberOf punycode.ucs2
+ * @name encode
+ * @param {Array} codePoints The array of numeric code points.
+ * @returns {String} The new Unicode string (UCS-2).
+ */
+const ucs2encode = array => String.fromCodePoint(...array);
+
+/**
+ * Converts a basic code point into a digit/integer.
+ * @see `digitToBasic()`
+ * @private
+ * @param {Number} codePoint The basic numeric code point value.
+ * @returns {Number} The numeric value of a basic code point (for use in
+ * representing integers) in the range `0` to `base - 1`, or `base` if
+ * the code point does not represent a value.
+ */
+const basicToDigit = function(codePoint) {
+ if (codePoint - 0x30 < 0x0A) {
+ return codePoint - 0x16;
+ }
+ if (codePoint - 0x41 < 0x1A) {
+ return codePoint - 0x41;
+ }
+ if (codePoint - 0x61 < 0x1A) {
+ return codePoint - 0x61;
+ }
+ return base;
+};
+
+/**
+ * Converts a digit/integer into a basic code point.
+ * @see `basicToDigit()`
+ * @private
+ * @param {Number} digit The numeric value of a basic code point.
+ * @returns {Number} The basic code point whose value (when used for
+ * representing integers) is `digit`, which needs to be in the range
+ * `0` to `base - 1`. If `flag` is non-zero, the uppercase form is
+ * used; else, the lowercase form is used. The behavior is undefined
+ * if `flag` is non-zero and `digit` has no uppercase form.
+ */
+const digitToBasic = function(digit, flag) {
+ // 0..25 map to ASCII a..z or A..Z
+ // 26..35 map to ASCII 0..9
+ return digit + 22 + 75 * (digit < 26) - ((flag != 0) << 5);
+};
+
+/**
+ * Bias adaptation function as per section 3.4 of RFC 3492.
+ * https://tools.ietf.org/html/rfc3492#section-3.4
+ * @private
+ */
+const adapt = function(delta, numPoints, firstTime) {
+ let k = 0;
+ delta = firstTime ? floor(delta / damp) : delta >> 1;
+ delta += floor(delta / numPoints);
+ for (/* no initialization */; delta > baseMinusTMin * tMax >> 1; k += base) {
+ delta = floor(delta / baseMinusTMin);
+ }
+ return floor(k + (baseMinusTMin + 1) * delta / (delta + skew));
+};
+
+/**
+ * Converts a Punycode string of ASCII-only symbols to a string of Unicode
+ * symbols.
+ * @memberOf punycode
+ * @param {String} input The Punycode string of ASCII-only symbols.
+ * @returns {String} The resulting string of Unicode symbols.
+ */
+const decode = function(input) {
+ // Don't use UCS-2.
+ const output = [];
+ const inputLength = input.length;
+ let i = 0;
+ let n = initialN;
+ let bias = initialBias;
+
+ // Handle the basic code points: let `basic` be the number of input code
+ // points before the last delimiter, or `0` if there is none, then copy
+ // the first basic code points to the output.
+
+ let basic = input.lastIndexOf(delimiter);
+ if (basic < 0) {
+ basic = 0;
+ }
+
+ for (let j = 0; j < basic; ++j) {
+ // if it's not a basic code point
+ if (input.charCodeAt(j) >= 0x80) {
+ error('not-basic');
+ }
+ output.push(input.charCodeAt(j));
+ }
+
+ // Main decoding loop: start just after the last delimiter if any basic code
+ // points were copied; start at the beginning otherwise.
+
+ for (let index = basic > 0 ? basic + 1 : 0; index < inputLength; /* no final expression */) {
+
+ // `index` is the index of the next character to be consumed.
+ // Decode a generalized variable-length integer into `delta`,
+ // which gets added to `i`. The overflow checking is easier
+ // if we increase `i` as we go, then subtract off its starting
+ // value at the end to obtain `delta`.
+ let oldi = i;
+ for (let w = 1, k = base; /* no condition */; k += base) {
+
+ if (index >= inputLength) {
+ error('invalid-input');
+ }
+
+ const digit = basicToDigit(input.charCodeAt(index++));
+
+ if (digit >= base || digit > floor((maxInt - i) / w)) {
+ error('overflow');
+ }
+
+ i += digit * w;
+ const t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias);
+
+ if (digit < t) {
+ break;
+ }
+
+ const baseMinusT = base - t;
+ if (w > floor(maxInt / baseMinusT)) {
+ error('overflow');
+ }
+
+ w *= baseMinusT;
+
+ }
+
+ const out = output.length + 1;
+ bias = adapt(i - oldi, out, oldi == 0);
+
+ // `i` was supposed to wrap around from `out` to `0`,
+ // incrementing `n` each time, so we'll fix that now:
+ if (floor(i / out) > maxInt - n) {
+ error('overflow');
+ }
+
+ n += floor(i / out);
+ i %= out;
+
+ // Insert `n` at position `i` of the output.
+ output.splice(i++, 0, n);
+
+ }
+
+ return String.fromCodePoint(...output);
+};
+
+/**
+ * Converts a string of Unicode symbols (e.g. a domain name label) to a
+ * Punycode string of ASCII-only symbols.
+ * @memberOf punycode
+ * @param {String} input The string of Unicode symbols.
+ * @returns {String} The resulting Punycode string of ASCII-only symbols.
+ */
+const encode = function(input) {
+ const output = [];
+
+ // Convert the input in UCS-2 to an array of Unicode code points.
+ input = ucs2decode(input);
+
+ // Cache the length.
+ let inputLength = input.length;
+
+ // Initialize the state.
+ let n = initialN;
+ let delta = 0;
+ let bias = initialBias;
+
+ // Handle the basic code points.
+ for (const currentValue of input) {
+ if (currentValue < 0x80) {
+ output.push(stringFromCharCode(currentValue));
+ }
+ }
+
+ let basicLength = output.length;
+ let handledCPCount = basicLength;
+
+ // `handledCPCount` is the number of code points that have been handled;
+ // `basicLength` is the number of basic code points.
+
+ // Finish the basic string with a delimiter unless it's empty.
+ if (basicLength) {
+ output.push(delimiter);
+ }
+
+ // Main encoding loop:
+ while (handledCPCount < inputLength) {
+
+ // All non-basic code points < n have been handled already. Find the next
+ // larger one:
+ let m = maxInt;
+ for (const currentValue of input) {
+ if (currentValue >= n && currentValue < m) {
+ m = currentValue;
+ }
+ }
+
+ // Increase `delta` enough to advance the decoder's <n,i> state to <m,0>,
+ // but guard against overflow.
+ const handledCPCountPlusOne = handledCPCount + 1;
+ if (m - n > floor((maxInt - delta) / handledCPCountPlusOne)) {
+ error('overflow');
+ }
+
+ delta += (m - n) * handledCPCountPlusOne;
+ n = m;
+
+ for (const currentValue of input) {
+ if (currentValue < n && ++delta > maxInt) {
+ error('overflow');
+ }
+ if (currentValue == n) {
+ // Represent delta as a generalized variable-length integer.
+ let q = delta;
+ for (let k = base; /* no condition */; k += base) {
+ const t = k <= bias ? tMin : (k >= bias + tMax ? tMax : k - bias);
+ if (q < t) {
+ break;
+ }
+ const qMinusT = q - t;
+ const baseMinusT = base - t;
+ output.push(
+ stringFromCharCode(digitToBasic(t + qMinusT % baseMinusT, 0))
+ );
+ q = floor(qMinusT / baseMinusT);
+ }
+
+ output.push(stringFromCharCode(digitToBasic(q, 0)));
+ bias = adapt(delta, handledCPCountPlusOne, handledCPCount == basicLength);
+ delta = 0;
+ ++handledCPCount;
+ }
+ }
+
+ ++delta;
+ ++n;
+
+ }
+ return output.join('');
+};
+
+/**
+ * Converts a Punycode string representing a domain name or an email address
+ * to Unicode. Only the Punycoded parts of the input will be converted, i.e.
+ * it doesn't matter if you call it on a string that has already been
+ * converted to Unicode.
+ * @memberOf punycode
+ * @param {String} input The Punycoded domain name or email address to
+ * convert to Unicode.
+ * @returns {String} The Unicode representation of the given Punycode
+ * string.
+ */
+const toUnicode = function(input) {
+ return mapDomain(input, function(string) {
+ return regexPunycode.test(string)
+ ? decode(string.slice(4).toLowerCase())
+ : string;
+ });
+};
+
+/**
+ * Converts a Unicode string representing a domain name or an email address to
+ * Punycode. Only the non-ASCII parts of the domain name will be converted,
+ * i.e. it doesn't matter if you call it with a domain that's already in
+ * ASCII.
+ * @memberOf punycode
+ * @param {String} input The domain name or email address to convert, as a
+ * Unicode string.
+ * @returns {String} The Punycode representation of the given domain name or
+ * email address.
+ */
+const toASCII = function(input) {
+ return mapDomain(input, function(string) {
+ return regexNonASCII.test(string)
+ ? 'xn--' + encode(string)
+ : string;
+ });
+};
+
+/*--------------------------------------------------------------------------*/
+
+/** Define the public API */
+const punycode = {
+ /**
+ * A string representing the current Punycode.js version number.
+ * @memberOf punycode
+ * @type String
+ */
+ 'version': '2.1.0',
+ /**
+ * An object of methods to convert from JavaScript's internal character
+ * representation (UCS-2) to Unicode code points, and back.
+ * @see <https://mathiasbynens.be/notes/javascript-encoding>
+ * @memberOf punycode
+ * @type Object
+ */
+ 'ucs2': {
+ 'decode': ucs2decode,
+ 'encode': ucs2encode
+ },
+ 'decode': decode,
+ 'encode': encode,
+ 'toASCII': toASCII,
+ 'toUnicode': toUnicode
+};
+
+module.exports = punycode;
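+
+// Illustrative usage (not part of the original source; assumes this
+// node_modules copy, rather than the deprecated Node core module, is the one
+// being resolved):
+//   const punycode = require('punycode/'); // trailing slash targets the package
+//   punycode.toASCII('mañana.com'); // 'xn--maana-pta.com'
+// The punycode.es6.js build above exposes the same API as ES module exports.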
diff --git a/familyark/app/node_modules/request/CHANGELOG.md b/familyark/app/node_modules/request/CHANGELOG.md
new file mode 100644
index 0000000..d3ffcd0
--- /dev/null
+++ b/familyark/app/node_modules/request/CHANGELOG.md
@@ -0,0 +1,717 @@
+## Change Log
+
+### v2.88.0 (2018/08/10)
+- [#2996](https://github.com/request/request/pull/2996) fix(uuid): import versioned uuid (@kwonoj)
+- [#2994](https://github.com/request/request/pull/2994) Update to oauth-sign 0.9.0 (@dlecocq)
+- [#2993](https://github.com/request/request/pull/2993) Fix header tests (@simov)
+- [#2904](https://github.com/request/request/pull/2904) #515, #2894 Strip port suffix from Host header if the protocol is known. (#2904) (@paambaati)
+- [#2791](https://github.com/request/request/pull/2791) Improve AWS SigV4 support. (#2791) (@vikhyat)
+- [#2977](https://github.com/request/request/pull/2977) Update test certificates (@simov)
+
+### v2.87.0 (2018/05/21)
+- [#2943](https://github.com/request/request/pull/2943) Replace hawk dependency with a local implementation (#2943) (@hueniverse)
+
+### v2.86.0 (2018/05/15)
+- [#2885](https://github.com/request/request/pull/2885) Remove redundant code (for Node.js 0.9.4 and below) and dependency (@ChALkeR)
+- [#2942](https://github.com/request/request/pull/2942) Make Test GREEN Again! (@simov)
+- [#2923](https://github.com/request/request/pull/2923) Alterations for failing CI tests (@gareth-robinson)
+
+### v2.85.0 (2018/03/12)
+- [#2880](https://github.com/request/request/pull/2880) Revert "Update hawk to 7.0.7 (#2880)" (@simov)
+
+### v2.84.0 (2018/03/12)
+- [#2793](https://github.com/request/request/pull/2793) Fixed calculation of oauth_body_hash, issue #2792 (@dvishniakov)
+- [#2880](https://github.com/request/request/pull/2880) Update hawk to 7.0.7 (#2880) (@kornel-kedzierski)
+
+### v2.83.0 (2017/09/27)
+- [#2776](https://github.com/request/request/pull/2776) Updating tough-cookie due to security fix. (#2776) (@karlnorling)
+
+### v2.82.0 (2017/09/19)
+- [#2703](https://github.com/request/request/pull/2703) Add Node.js v8 to Travis CI (@ryysud)
+- [#2751](https://github.com/request/request/pull/2751) Update of hawk and qs to latest version (#2751) (@Olivier-Moreau)
+- [#2658](https://github.com/request/request/pull/2658) Fixed some text in README.md (#2658) (@Marketionist)
+- [#2635](https://github.com/request/request/pull/2635) chore(package): update aws-sign2 to version 0.7.0 (#2635) (@greenkeeperio-bot)
+- [#2641](https://github.com/request/request/pull/2641) Update README to simplify & update convenience methods (#2641) (@FredKSchott)
+- [#2541](https://github.com/request/request/pull/2541) Add convenience method for HTTP OPTIONS (#2541) (@jamesseanwright)
+- [#2605](https://github.com/request/request/pull/2605) Add promise support section to README (#2605) (@FredKSchott)
+- [#2579](https://github.com/request/request/pull/2579) refactor(lint): replace eslint with standard (#2579) (@ahmadnassri)
+- [#2598](https://github.com/request/request/pull/2598) Update codecov to version 2.0.2 🚀 (@greenkeeperio-bot)
+- [#2590](https://github.com/request/request/pull/2590) Adds test-timing keepAlive test (@nicjansma)
+- [#2589](https://github.com/request/request/pull/2589) fix tabulation on request example README.MD (@odykyi)
+- [#2594](https://github.com/request/request/pull/2594) chore(dependencies): har-validator to 5.x [removes babel dep] (@ahmadnassri)
+
+### v2.81.0 (2017/03/09)
+- [#2584](https://github.com/request/request/pull/2584) Security issue: Upgrade qs to version 6.4.0 (@sergejmueller)
+- [#2578](https://github.com/request/request/pull/2578) safe-buffer doesn't zero-fill by default, its just a polyfill. (#2578) (@mikeal)
+- [#2566](https://github.com/request/request/pull/2566) Timings: Tracks 'lookup', adds 'wait' time, fixes connection re-use (#2566) (@nicjansma)
+- [#2574](https://github.com/request/request/pull/2574) Migrating to safe-buffer for improved security. (@mikeal)
+- [#2573](https://github.com/request/request/pull/2573) fixes #2572 (@ahmadnassri)
+
+### v2.80.0 (2017/03/04)
+- [#2571](https://github.com/request/request/pull/2571) Correctly format the Host header for IPv6 addresses (@JamesMGreene)
+- [#2558](https://github.com/request/request/pull/2558) Update README.md example snippet (@FredKSchott)
+- [#2221](https://github.com/request/request/pull/2221) Adding a simple Response object reference in argument specification (@calamarico)
+- [#2452](https://github.com/request/request/pull/2452) Adds .timings array with DNC, TCP, request and response times (@nicjansma)
+- [#2553](https://github.com/request/request/pull/2553) add ISSUE_TEMPLATE, move PR template (@FredKSchott)
+- [#2539](https://github.com/request/request/pull/2539) Create PULL_REQUEST_TEMPLATE.md (@FredKSchott)
+- [#2524](https://github.com/request/request/pull/2524) Update caseless to version 0.12.0 🚀 (@greenkeeperio-bot)
+- [#2460](https://github.com/request/request/pull/2460) Fix wrong MIME type in example (@OwnageIsMagic)
+- [#2514](https://github.com/request/request/pull/2514) Change tags to keywords in package.json (@humphd)
+- [#2492](https://github.com/request/request/pull/2492) More lenient gzip decompression (@addaleax)
+
+### v2.79.0 (2016/11/18)
+- [#2368](https://github.com/request/request/pull/2368) Fix typeof check in test-pool.js (@forivall)
+- [#2394](https://github.com/request/request/pull/2394) Use `files` in package.json (@SimenB)
+- [#2463](https://github.com/request/request/pull/2463) AWS support for session tokens for temporary credentials (@simov)
+- [#2467](https://github.com/request/request/pull/2467) Migrate to uuid (@simov, @antialias)
+- [#2459](https://github.com/request/request/pull/2459) Update taper to version 0.5.0 🚀 (@greenkeeperio-bot)
+- [#2448](https://github.com/request/request/pull/2448) Make other connect timeout test more reliable too (@mscdex)
+
+### v2.78.0 (2016/11/03)
+- [#2447](https://github.com/request/request/pull/2447) Always set request timeout on keep-alive connections (@mscdex)
+
+### v2.77.0 (2016/11/03)
+- [#2439](https://github.com/request/request/pull/2439) Fix socket 'connect' listener handling (@mscdex)
+- [#2442](https://github.com/request/request/pull/2442) 👻😱 Node.js 0.10 is unmaintained 😱👻 (@greenkeeperio-bot)
+- [#2435](https://github.com/request/request/pull/2435) Add followOriginalHttpMethod to redirect to original HTTP method (@kirrg001)
+- [#2414](https://github.com/request/request/pull/2414) Improve test-timeout reliability (@mscdex)
+
+### v2.76.0 (2016/10/25)
+- [#2424](https://github.com/request/request/pull/2424) Handle buffers directly instead of using "bl" (@zertosh)
+- [#2415](https://github.com/request/request/pull/2415) Re-enable timeout tests on Travis + other fixes (@mscdex)
+- [#2431](https://github.com/request/request/pull/2431) Improve timeouts accuracy and node v6.8.0+ compatibility (@mscdex, @greenkeeperio-bot)
+- [#2428](https://github.com/request/request/pull/2428) Update qs to version 6.3.0 🚀 (@greenkeeperio-bot)
+- [#2420](https://github.com/request/request/pull/2420) change .on to .once, remove possible memory leaks (@duereg)
+- [#2426](https://github.com/request/request/pull/2426) Remove "isFunction" helper in favor of "typeof" check (@zertosh)
+- [#2425](https://github.com/request/request/pull/2425) Simplify "defer" helper creation (@zertosh)
+- [#2402](https://github.com/request/request/pull/2402) form-data@2.1.1 breaks build 🚨 (@greenkeeperio-bot)
+- [#2393](https://github.com/request/request/pull/2393) Update form-data to version 2.1.0 🚀 (@greenkeeperio-bot)
+
+### v2.75.0 (2016/09/17)
+- [#2381](https://github.com/request/request/pull/2381) Drop support for Node 0.10 (@simov)
+- [#2377](https://github.com/request/request/pull/2377) Update form-data to version 2.0.0 🚀 (@greenkeeperio-bot)
+- [#2353](https://github.com/request/request/pull/2353) Add greenkeeper ignored packages (@simov)
+- [#2351](https://github.com/request/request/pull/2351) Update karma-tap to version 3.0.1 🚀 (@greenkeeperio-bot)
+- [#2348](https://github.com/request/request/pull/2348) form-data@1.0.1 breaks build 🚨 (@greenkeeperio-bot)
+- [#2349](https://github.com/request/request/pull/2349) Check error type instead of string (@scotttrinh)
+
+### v2.74.0 (2016/07/22)
+- [#2295](https://github.com/request/request/pull/2295) Update tough-cookie to 2.3.0 (@stash-sfdc)
+- [#2280](https://github.com/request/request/pull/2280) Update karma-tap to version 2.0.1 🚀 (@greenkeeperio-bot)
+
+### v2.73.0 (2016/07/09)
+- [#2240](https://github.com/request/request/pull/2240) Remove connectionErrorHandler to fix #1903 (@zarenner)
+- [#2251](https://github.com/request/request/pull/2251) tape@4.6.0 breaks build 🚨 (@greenkeeperio-bot)
+- [#2225](https://github.com/request/request/pull/2225) Update docs (@ArtskydJ)
+- [#2203](https://github.com/request/request/pull/2203) Update browserify to version 13.0.1 🚀 (@greenkeeperio-bot)
+- [#2275](https://github.com/request/request/pull/2275) Update karma to version 1.1.1 🚀 (@greenkeeperio-bot)
+- [#2204](https://github.com/request/request/pull/2204) Add codecov.yml and disable PR comments (@simov)
+- [#2212](https://github.com/request/request/pull/2212) Fix link to http.IncomingMessage documentation (@nazieb)
+- [#2208](https://github.com/request/request/pull/2208) Update to form-data RC4 and pass null values to it (@simov)
+- [#2207](https://github.com/request/request/pull/2207) Move aws4 require statement to the top (@simov)
+- [#2199](https://github.com/request/request/pull/2199) Update karma-coverage to version 1.0.0 🚀 (@greenkeeperio-bot)
+- [#2206](https://github.com/request/request/pull/2206) Update qs to version 6.2.0 🚀 (@greenkeeperio-bot)
+- [#2205](https://github.com/request/request/pull/2205) Use server-destroy to close hanging sockets in tests (@simov)
+- [#2200](https://github.com/request/request/pull/2200) Update karma-cli to version 1.0.0 🚀 (@greenkeeperio-bot)
+
+### v2.72.0 (2016/04/17)
+- [#2176](https://github.com/request/request/pull/2176) Do not try to pipe Gzip responses with no body (@simov)
+- [#2175](https://github.com/request/request/pull/2175) Add 'delete' alias for the 'del' API method (@simov, @MuhanZou)
+- [#2172](https://github.com/request/request/pull/2172) Add support for deflate content encoding (@czardoz)
+- [#2169](https://github.com/request/request/pull/2169) Add callback option (@simov)
+- [#2165](https://github.com/request/request/pull/2165) Check for self.req existence inside the write method (@simov)
+- [#2167](https://github.com/request/request/pull/2167) Fix TravisCI badge reference master branch (@a0viedo)
+
+### v2.71.0 (2016/04/12)
+- [#2164](https://github.com/request/request/pull/2164) Catch errors from the underlying http module (@simov)
+
+### v2.70.0 (2016/04/05)
+- [#2147](https://github.com/request/request/pull/2147) Update eslint to version 2.5.3 🚀 (@simov, @greenkeeperio-bot)
+- [#2009](https://github.com/request/request/pull/2009) Support JSON stringify replacer argument. (@elyobo)
+- [#2142](https://github.com/request/request/pull/2142) Update eslint to version 2.5.1 🚀 (@greenkeeperio-bot)
+- [#2128](https://github.com/request/request/pull/2128) Update browserify-istanbul to version 2.0.0 🚀 (@greenkeeperio-bot)
+- [#2115](https://github.com/request/request/pull/2115) Update eslint to version 2.3.0 🚀 (@simov, @greenkeeperio-bot)
+- [#2089](https://github.com/request/request/pull/2089) Fix badges (@simov)
+- [#2092](https://github.com/request/request/pull/2092) Update browserify-istanbul to version 1.0.0 🚀 (@greenkeeperio-bot)
+- [#2079](https://github.com/request/request/pull/2079) Accept read stream as body option (@simov)
+- [#2070](https://github.com/request/request/pull/2070) Update bl to version 1.1.2 🚀 (@greenkeeperio-bot)
+- [#2063](https://github.com/request/request/pull/2063) Up bluebird and oauth-sign (@simov)
+- [#2058](https://github.com/request/request/pull/2058) Karma fixes for latest versions (@eiriksm)
+- [#2057](https://github.com/request/request/pull/2057) Update contributing guidelines (@simov)
+- [#2054](https://github.com/request/request/pull/2054) Update qs to version 6.1.0 🚀 (@greenkeeperio-bot)
+
+### v2.69.0 (2016/01/27)
+- [#2041](https://github.com/request/request/pull/2041) restore aws4 as regular dependency (@rmg)
+
+### v2.68.0 (2016/01/27)
+- [#2036](https://github.com/request/request/pull/2036) Add AWS Signature Version 4 (@simov, @mirkods)
+- [#2022](https://github.com/request/request/pull/2022) Convert numeric multipart bodies to string (@simov, @feross)
+- [#2024](https://github.com/request/request/pull/2024) Update har-validator dependency for nsp advisory #76 (@TylerDixon)
+- [#2016](https://github.com/request/request/pull/2016) Update qs to version 6.0.2 🚀 (@greenkeeperio-bot)
+- [#2007](https://github.com/request/request/pull/2007) Use the `extend` module instead of util._extend (@simov)
+- [#2003](https://github.com/request/request/pull/2003) Update browserify to version 13.0.0 🚀 (@greenkeeperio-bot)
+- [#1989](https://github.com/request/request/pull/1989) Update buffer-equal to version 1.0.0 🚀 (@greenkeeperio-bot)
+- [#1956](https://github.com/request/request/pull/1956) Check form-data content-length value before setting up the header (@jongyoonlee)
+- [#1958](https://github.com/request/request/pull/1958) Use IncomingMessage.destroy method (@simov)
+- [#1952](https://github.com/request/request/pull/1952) Adds example for Tor proxy (@prometheansacrifice)
+- [#1943](https://github.com/request/request/pull/1943) Update eslint to version 1.10.3 🚀 (@simov, @greenkeeperio-bot)
+- [#1924](https://github.com/request/request/pull/1924) Update eslint to version 1.10.1 🚀 (@greenkeeperio-bot)
+- [#1915](https://github.com/request/request/pull/1915) Remove content-length and transfer-encoding headers from defaultProxyHeaderWhiteList (@yaxia)
+
+### v2.67.0 (2015/11/19)
+- [#1913](https://github.com/request/request/pull/1913) Update http-signature to version 1.1.0 🚀 (@greenkeeperio-bot)
+
+### v2.66.0 (2015/11/18)
+- [#1906](https://github.com/request/request/pull/1906) Update README URLs based on HTTP redirects (@ReadmeCritic)
+- [#1905](https://github.com/request/request/pull/1905) Convert typed arrays into regular buffers (@simov)
+- [#1902](https://github.com/request/request/pull/1902) node-uuid@1.4.7 breaks build 🚨 (@greenkeeperio-bot)
+- [#1894](https://github.com/request/request/pull/1894) Fix tunneling after redirection from https (Original: #1881) (@simov, @falms)
+- [#1893](https://github.com/request/request/pull/1893) Update eslint to version 1.9.0 🚀 (@greenkeeperio-bot)
+- [#1852](https://github.com/request/request/pull/1852) Update eslint to version 1.7.3 🚀 (@simov, @greenkeeperio-bot, @paulomcnally, @michelsalib, @arbaaz, @nsklkn, @LoicMahieu, @JoshWillik, @jzaefferer, @ryanwholey, @djchie, @thisconnect, @mgenereu, @acroca, @Sebmaster, @KoltesDigital)
+- [#1876](https://github.com/request/request/pull/1876) Implement loose matching for har mime types (@simov)
+- [#1875](https://github.com/request/request/pull/1875) Update bluebird to version 3.0.2 🚀 (@simov, @greenkeeperio-bot)
+- [#1871](https://github.com/request/request/pull/1871) Update browserify to version 12.0.1 🚀 (@greenkeeperio-bot)
+- [#1866](https://github.com/request/request/pull/1866) Add missing quotes on x-token property in README (@miguelmota)
+- [#1874](https://github.com/request/request/pull/1874) Fix typo in README.md (@gswalden)
+- [#1860](https://github.com/request/request/pull/1860) Improve referer header tests and docs (@simov)
+- [#1861](https://github.com/request/request/pull/1861) Remove redundant call to Stream constructor (@watson)
+- [#1857](https://github.com/request/request/pull/1857) Fix Referer header to point to the original host name (@simov)
+- [#1850](https://github.com/request/request/pull/1850) Update karma-coverage to version 0.5.3 🚀 (@greenkeeperio-bot)
+- [#1847](https://github.com/request/request/pull/1847) Use node's latest version when building (@simov)
+- [#1836](https://github.com/request/request/pull/1836) Tunnel: fix wrong property name (@KoltesDigital)
+- [#1820](https://github.com/request/request/pull/1820) Set href as request.js uses it (@mgenereu)
+- [#1840](https://github.com/request/request/pull/1840) Update http-signature to version 1.0.2 🚀 (@greenkeeperio-bot)
+- [#1845](https://github.com/request/request/pull/1845) Update istanbul to version 0.4.0 🚀 (@greenkeeperio-bot)
+
+### v2.65.0 (2015/10/11)
+- [#1833](https://github.com/request/request/pull/1833) Update aws-sign2 to version 0.6.0 🚀 (@greenkeeperio-bot)
+- [#1811](https://github.com/request/request/pull/1811) Enable loose cookie parsing in tough-cookie (@Sebmaster)
+- [#1830](https://github.com/request/request/pull/1830) Bring back tilde ranges for all dependencies (@simov)
+- [#1821](https://github.com/request/request/pull/1821) Implement support for RFC 2617 MD5-sess algorithm. (@BigDSK)
+- [#1828](https://github.com/request/request/pull/1828) Updated qs dependency to 5.2.0 (@acroca)
+- [#1818](https://github.com/request/request/pull/1818) Extract `readResponseBody` method out of `onRequestResponse` (@pvoisin)
+- [#1819](https://github.com/request/request/pull/1819) Run stringify once (@mgenereu)
+- [#1814](https://github.com/request/request/pull/1814) Updated har-validator to version 2.0.2 (@greenkeeperio-bot)
+- [#1807](https://github.com/request/request/pull/1807) Updated tough-cookie to version 2.1.0 (@greenkeeperio-bot)
+- [#1800](https://github.com/request/request/pull/1800) Add caret ranges for devDependencies, except eslint (@simov)
+- [#1799](https://github.com/request/request/pull/1799) Updated karma-browserify to version 4.4.0 (@greenkeeperio-bot)
+- [#1797](https://github.com/request/request/pull/1797) Updated tape to version 4.2.0 (@greenkeeperio-bot)
+- [#1788](https://github.com/request/request/pull/1788) Pinned all dependencies (@greenkeeperio-bot)
+
+### v2.64.0 (2015/09/25)
+- [#1787](https://github.com/request/request/pull/1787) npm ignore examples, release.sh and disabled.appveyor.yml (@thisconnect)
+- [#1775](https://github.com/request/request/pull/1775) Fix typo in README.md (@djchie)
+- [#1776](https://github.com/request/request/pull/1776) Changed word 'conjuction' to read 'conjunction' in README.md (@ryanwholey)
+- [#1785](https://github.com/request/request/pull/1785) Revert: Set default application/json content-type when using json option #1772 (@simov)
+
+### v2.63.0 (2015/09/21)
+- [#1772](https://github.com/request/request/pull/1772) Set default application/json content-type when using json option (@jzaefferer)
+
+### v2.62.0 (2015/09/15)
+- [#1768](https://github.com/request/request/pull/1768) Add node 4.0 to the list of build targets (@simov)
+- [#1767](https://github.com/request/request/pull/1767) Query strings now cooperate with unix sockets (@JoshWillik)
+- [#1750](https://github.com/request/request/pull/1750) Revert doc about installation of tough-cookie added in #884 (@LoicMahieu)
+- [#1746](https://github.com/request/request/pull/1746) Missed comma in Readme (@nsklkn)
+- [#1743](https://github.com/request/request/pull/1743) Fix options not being initialized in defaults method (@simov)
+
+### v2.61.0 (2015/08/19)
+- [#1721](https://github.com/request/request/pull/1721) Minor fix in README.md (@arbaaz)
+- [#1733](https://github.com/request/request/pull/1733) Avoid useless Buffer transformation (@michelsalib)
+- [#1726](https://github.com/request/request/pull/1726) Update README.md (@paulomcnally)
+- [#1715](https://github.com/request/request/pull/1715) Fix forever option in node > 0.10 #1709 (@calibr)
+- [#1716](https://github.com/request/request/pull/1716) Do not create Buffer from Object in setContentLength(iojs v3.0 issue) (@calibr)
+- [#1711](https://github.com/request/request/pull/1711) Add ability to detect connect timeouts (@kevinburke)
+- [#1712](https://github.com/request/request/pull/1712) Set certificate expiration to August 2, 2018 (@kevinburke)
+- [#1700](https://github.com/request/request/pull/1700) debug() when JSON.parse() on a response body fails (@phillipj)
+
+### v2.60.0 (2015/07/21)
+- [#1687](https://github.com/request/request/pull/1687) Fix caseless bug - content-type not being set for multipart/form-data (@simov, @garymathews)
+
+### v2.59.0 (2015/07/20)
+- [#1671](https://github.com/request/request/pull/1671) Add tests and docs for using the agent, agentClass, agentOptions and forever options.
+ Forever option defaults to using http(s).Agent in node 0.12+ (@simov)
+- [#1679](https://github.com/request/request/pull/1679) Fix - do not remove OAuth param when using OAuth realm (@simov, @jhalickman)
+- [#1668](https://github.com/request/request/pull/1668) updated dependencies (@deamme)
+- [#1656](https://github.com/request/request/pull/1656) Fix form method (@simov)
+- [#1651](https://github.com/request/request/pull/1651) Preserve HEAD method when using followAllRedirects (@simov)
+- [#1652](https://github.com/request/request/pull/1652) Update `encoding` option documentation in README.md (@daniel347x)
+- [#1650](https://github.com/request/request/pull/1650) Allow content-type overriding when using the `form` option (@simov)
+- [#1646](https://github.com/request/request/pull/1646) Clarify the nature of setting `ca` in `agentOptions` (@jeffcharles)
+
+### v2.58.0 (2015/06/16)
+- [#1638](https://github.com/request/request/pull/1638) Use the `extend` module to deep extend in the defaults method (@simov)
+- [#1631](https://github.com/request/request/pull/1631) Move tunnel logic into separate module (@simov)
+- [#1634](https://github.com/request/request/pull/1634) Fix OAuth query transport_method (@simov)
+- [#1603](https://github.com/request/request/pull/1603) Add codecov (@simov)
+
+### v2.57.0 (2015/05/31)
+- [#1615](https://github.com/request/request/pull/1615) Replace '.client' with '.socket' as the former was deprecated in 2.2.0. (@ChALkeR)
+
+### v2.56.0 (2015/05/28)
+- [#1610](https://github.com/request/request/pull/1610) Bump module dependencies (@simov)
+- [#1600](https://github.com/request/request/pull/1600) Extract the querystring logic into separate module (@simov)
+- [#1607](https://github.com/request/request/pull/1607) Re-generate certificates (@simov)
+- [#1599](https://github.com/request/request/pull/1599) Move getProxyFromURI logic below the check for Invalid URI (#1595) (@simov)
+- [#1598](https://github.com/request/request/pull/1598) Fix the way http verbs are defined in order to please intellisense IDEs (@simov, @flannelJesus)
+- [#1591](https://github.com/request/request/pull/1591) A few minor fixes: (@simov)
+- [#1584](https://github.com/request/request/pull/1584) Refactor test-default tests (according to comments in #1430) (@simov)
+- [#1585](https://github.com/request/request/pull/1585) Fixing documentation regarding TLS options (#1583) (@mainakae)
+- [#1574](https://github.com/request/request/pull/1574) Refresh the oauth_nonce on redirect (#1573) (@simov)
+- [#1570](https://github.com/request/request/pull/1570) Discovered tests that weren't properly running (@seanstrom)
+- [#1569](https://github.com/request/request/pull/1569) Fix pause before response arrives (@kevinoid)
+- [#1558](https://github.com/request/request/pull/1558) Emit error instead of throw (@simov)
+- [#1568](https://github.com/request/request/pull/1568) Fix stall when piping gzipped response (@kevinoid)
+- [#1560](https://github.com/request/request/pull/1560) Update combined-stream (@apechimp)
+- [#1543](https://github.com/request/request/pull/1543) Initial support for oauth_body_hash on json payloads (@simov, @aesopwolf)
+- [#1541](https://github.com/request/request/pull/1541) Fix coveralls (@simov)
+- [#1540](https://github.com/request/request/pull/1540) Fix recursive defaults for convenience methods (@simov)
+- [#1536](https://github.com/request/request/pull/1536) More eslint style rules (@froatsnook)
+- [#1533](https://github.com/request/request/pull/1533) Adding dependency status bar to README.md (@YasharF)
+- [#1539](https://github.com/request/request/pull/1539) ensure the latest version of har-validator is included (@ahmadnassri)
+- [#1516](https://github.com/request/request/pull/1516) forever+pool test (@devTristan)
+
+### v2.55.0 (2015/04/05)
+- [#1520](https://github.com/request/request/pull/1520) Refactor defaults (@simov)
+- [#1525](https://github.com/request/request/pull/1525) Delete request headers with undefined value. (@froatsnook)
+- [#1521](https://github.com/request/request/pull/1521) Add promise tests (@simov)
+- [#1518](https://github.com/request/request/pull/1518) Fix defaults (@simov)
+- [#1515](https://github.com/request/request/pull/1515) Allow static invoking of convenience methods (@simov)
+- [#1505](https://github.com/request/request/pull/1505) Fix multipart boundary extraction regexp (@simov)
+- [#1510](https://github.com/request/request/pull/1510) Fix basic auth form data (@simov)
+
+### v2.54.0 (2015/03/24)
+- [#1501](https://github.com/request/request/pull/1501) HTTP Archive 1.2 support (@ahmadnassri)
+- [#1486](https://github.com/request/request/pull/1486) Add a test for the forever agent (@akshayp)
+- [#1500](https://github.com/request/request/pull/1500) Adding handling for no auth method and null bearer (@philberg)
+- [#1498](https://github.com/request/request/pull/1498) Add table of contents in readme (@simov)
+- [#1477](https://github.com/request/request/pull/1477) Add support for qs options via qsOptions key (@simov)
+- [#1496](https://github.com/request/request/pull/1496) Parameters encoded to base 64 should be decoded as UTF-8, not ASCII. (@albanm)
+- [#1494](https://github.com/request/request/pull/1494) Update eslint (@froatsnook)
+- [#1474](https://github.com/request/request/pull/1474) Require Colon in Basic Auth (@erykwalder)
+- [#1481](https://github.com/request/request/pull/1481) Fix baseUrl and redirections. (@burningtree)
+- [#1469](https://github.com/request/request/pull/1469) Feature/base url (@froatsnook)
+- [#1459](https://github.com/request/request/pull/1459) Add option to time request/response cycle (including rollup of redirects) (@aaron-em)
+- [#1468](https://github.com/request/request/pull/1468) Re-enable io.js/node 0.12 build (@simov, @mikeal, @BBB)
+- [#1442](https://github.com/request/request/pull/1442) Fixed the issue with strictSSL tests on 0.12 & io.js by explicitly setting a cipher that matches the cert. (@BBB, @nickmccurdy, @demohi, @simov, @0x4139)
+- [#1460](https://github.com/request/request/pull/1460) localAddress or proxy config is lost when redirecting (@simov, @0x4139)
+- [#1453](https://github.com/request/request/pull/1453) Test on Node.js 0.12 and io.js with allowed failures (@nickmccurdy, @demohi)
+- [#1426](https://github.com/request/request/pull/1426) Fixing tests to pass on io.js and node 0.12 (only test-https.js stiff failing) (@mikeal)
+- [#1446](https://github.com/request/request/pull/1446) Missing HTTP referer header with redirects Fixes #1038 (@simov, @guimon)
+- [#1428](https://github.com/request/request/pull/1428) Deprecate Node v0.8.x (@nylen)
+- [#1436](https://github.com/request/request/pull/1436) Add ability to set a requester without setting default options (@tikotzky)
+- [#1435](https://github.com/request/request/pull/1435) dry up verb methods (@sethpollack)
+- [#1423](https://github.com/request/request/pull/1423) Allow fully qualified multipart content-type header (@simov)
+- [#1430](https://github.com/request/request/pull/1430) Fix recursive requester (@tikotzky)
+- [#1429](https://github.com/request/request/pull/1429) Throw error when making HEAD request with a body (@tikotzky)
+- [#1419](https://github.com/request/request/pull/1419) Add note that the project is broken in 0.12.x (@nylen)
+- [#1413](https://github.com/request/request/pull/1413) Fix basic auth (@simov)
+- [#1397](https://github.com/request/request/pull/1397) Improve pipe-from-file tests (@nylen)
+
+### v2.53.0 (2015/02/02)
+- [#1396](https://github.com/request/request/pull/1396) Do not rfc3986 escape JSON bodies (@nylen, @simov)
+- [#1392](https://github.com/request/request/pull/1392) Improve `timeout` option description (@watson)
+
+### v2.52.0 (2015/02/02)
+- [#1383](https://github.com/request/request/pull/1383) Add missing HTTPS options that were not being passed to tunnel (@brichard19) (@nylen)
+- [#1388](https://github.com/request/request/pull/1388) Upgrade mime-types package version (@roderickhsiao)
+- [#1389](https://github.com/request/request/pull/1389) Revise Setup Tunnel Function (@seanstrom)
+- [#1374](https://github.com/request/request/pull/1374) Allow explicitly disabling tunneling for proxied https destinations (@nylen)
+- [#1376](https://github.com/request/request/pull/1376) Use karma-browserify for tests. Add browser test coverage reporter. (@eiriksm)
+- [#1366](https://github.com/request/request/pull/1366) Refactor OAuth into separate module (@simov)
+- [#1373](https://github.com/request/request/pull/1373) Rewrite tunnel test to be pure Node.js (@nylen)
+- [#1371](https://github.com/request/request/pull/1371) Upgrade test reporter (@nylen)
+- [#1360](https://github.com/request/request/pull/1360) Refactor basic, bearer, digest auth logic into separate class (@simov)
+- [#1354](https://github.com/request/request/pull/1354) Remove circular dependency from debugging code (@nylen)
+- [#1351](https://github.com/request/request/pull/1351) Move digest auth into private prototype method (@simov)
+- [#1352](https://github.com/request/request/pull/1352) Update hawk dependency to ~2.3.0 (@mridgway)
+- [#1353](https://github.com/request/request/pull/1353) Correct travis-ci badge (@dogancelik)
+- [#1349](https://github.com/request/request/pull/1349) Make sure we return on errored browser requests. (@eiriksm)
+- [#1346](https://github.com/request/request/pull/1346) getProxyFromURI Extraction Refactor (@seanstrom)
+- [#1337](https://github.com/request/request/pull/1337) Standardize test ports on 6767 (@nylen)
+- [#1341](https://github.com/request/request/pull/1341) Emit FormData error events as Request error events (@nylen, @rwky)
+- [#1343](https://github.com/request/request/pull/1343) Clean up readme badges, and add Travis and Coveralls badges (@nylen)
+- [#1345](https://github.com/request/request/pull/1345) Update README.md (@Aaron-Hartwig)
+- [#1338](https://github.com/request/request/pull/1338) Always wait for server.close() callback in tests (@nylen)
+- [#1342](https://github.com/request/request/pull/1342) Add mock https server and redo start of browser tests for this purpose. (@eiriksm)
+- [#1339](https://github.com/request/request/pull/1339) Improve auth docs (@nylen)
+- [#1335](https://github.com/request/request/pull/1335) Add support for OAuth plaintext signature method (@simov)
+- [#1332](https://github.com/request/request/pull/1332) Add clean script to remove test-browser.js after the tests run (@seanstrom)
+- [#1327](https://github.com/request/request/pull/1327) Fix errors generating coverage reports. (@nylen)
+- [#1330](https://github.com/request/request/pull/1330) Return empty buffer upon empty response body and encoding is set to null (@seanstrom)
+- [#1326](https://github.com/request/request/pull/1326) Use faster container-based infrastructure on Travis (@nylen)
+- [#1315](https://github.com/request/request/pull/1315) Implement rfc3986 option (@simov, @nylen, @apoco, @DullReferenceException, @mmalecki, @oliamb, @cliffcrosland, @LewisJEllis, @eiriksm, @poislagarde)
+- [#1314](https://github.com/request/request/pull/1314) Detect urlencoded form data header via regex (@simov)
+- [#1317](https://github.com/request/request/pull/1317) Improve OAuth1.0 server side flow example (@simov)
+
+### v2.51.0 (2014/12/10)
+- [#1310](https://github.com/request/request/pull/1310) Revert changes introduced in https://github.com/request/request/pull/1282 (@simov)
+
+### v2.50.0 (2014/12/09)
+- [#1308](https://github.com/request/request/pull/1308) Add browser test to keep track of browserify compatibility. (@eiriksm)
+- [#1299](https://github.com/request/request/pull/1299) Add optional support for jsonReviver (@poislagarde)
+- [#1277](https://github.com/request/request/pull/1277) Add Coveralls configuration (@simov)
+- [#1307](https://github.com/request/request/pull/1307) Upgrade form-data, add back browserify compatibility. Fixes #455. (@eiriksm)
+- [#1305](https://github.com/request/request/pull/1305) Fix typo in README.md (@LewisJEllis)
+- [#1288](https://github.com/request/request/pull/1288) Update README.md to explain custom file use case (@cliffcrosland)
+
+### v2.49.0 (2014/11/28)
+- [#1295](https://github.com/request/request/pull/1295) fix(proxy): no-proxy false positive (@oliamb)
+- [#1292](https://github.com/request/request/pull/1292) Upgrade `caseless` to 0.8.1 (@mmalecki)
+- [#1276](https://github.com/request/request/pull/1276) Set transfer encoding for multipart/related to chunked by default (@simov)
+- [#1275](https://github.com/request/request/pull/1275) Fix multipart content-type headers detection (@simov)
+- [#1269](https://github.com/request/request/pull/1269) adds streams example for review (@tbuchok)
+- [#1238](https://github.com/request/request/pull/1238) Add examples README.md (@simov)
+
+### v2.48.0 (2014/11/12)
+- [#1263](https://github.com/request/request/pull/1263) Fixed a syntax error / typo in README.md (@xna2)
+- [#1253](https://github.com/request/request/pull/1253) Add multipart chunked flag (@simov, @nylen)
+- [#1251](https://github.com/request/request/pull/1251) Clarify that defaults() does not modify global defaults (@nylen)
+- [#1250](https://github.com/request/request/pull/1250) Improve documentation for pool and maxSockets options (@nylen)
+- [#1237](https://github.com/request/request/pull/1237) Documenting error handling when using streams (@vmattos)
+- [#1244](https://github.com/request/request/pull/1244) Finalize changelog command (@nylen)
+- [#1241](https://github.com/request/request/pull/1241) Fix typo (@alexanderGugel)
+- [#1223](https://github.com/request/request/pull/1223) Show latest version number instead of "upcoming" in changelog (@nylen)
+- [#1236](https://github.com/request/request/pull/1236) Document how to use custom CA in README (#1229) (@hypesystem)
+- [#1228](https://github.com/request/request/pull/1228) Support for oauth with RSA-SHA1 signing (@nylen)
+- [#1216](https://github.com/request/request/pull/1216) Made json and multipart options coexist (@nylen, @simov)
+- [#1225](https://github.com/request/request/pull/1225) Allow header white/exclusive lists in any case. (@RReverser)
+
+### v2.47.0 (2014/10/26)
+- [#1222](https://github.com/request/request/pull/1222) Move from mikeal/request to request/request (@nylen)
+- [#1220](https://github.com/request/request/pull/1220) update qs dependency to 2.3.1 (@FredKSchott)
+- [#1212](https://github.com/request/request/pull/1212) Improve tests/test-timeout.js (@nylen)
+- [#1219](https://github.com/request/request/pull/1219) remove old globalAgent workaround for node 0.4 (@request)
+- [#1214](https://github.com/request/request/pull/1214) Remove cruft left over from optional dependencies (@nylen)
+- [#1215](https://github.com/request/request/pull/1215) Add proxyHeaderExclusiveList option for proxy-only headers. (@RReverser)
+- [#1211](https://github.com/request/request/pull/1211) Allow 'Host' header instead of 'host' and remember case across redirects (@nylen)
+- [#1208](https://github.com/request/request/pull/1208) Improve release script (@nylen)
+- [#1213](https://github.com/request/request/pull/1213) Support for custom cookie store (@nylen, @mitsuru)
+- [#1197](https://github.com/request/request/pull/1197) Clean up some code around setting the agent (@FredKSchott)
+- [#1209](https://github.com/request/request/pull/1209) Improve multipart form append test (@simov)
+- [#1207](https://github.com/request/request/pull/1207) Update changelog (@nylen)
+- [#1185](https://github.com/request/request/pull/1185) Stream multipart/related bodies (@simov)
+
+### v2.46.0 (2014/10/23)
+- [#1198](https://github.com/request/request/pull/1198) doc for TLS/SSL protocol options (@shawnzhu)
+- [#1200](https://github.com/request/request/pull/1200) Add a Gitter chat badge to README.md (@gitter-badger)
+- [#1196](https://github.com/request/request/pull/1196) Upgrade taper test reporter to v0.3.0 (@nylen)
+- [#1199](https://github.com/request/request/pull/1199) Fix lint error: undeclared var i (@nylen)
+- [#1191](https://github.com/request/request/pull/1191) Move self.proxy decision logic out of init and into a helper (@FredKSchott)
+- [#1190](https://github.com/request/request/pull/1190) Move _buildRequest() logic back into init (@FredKSchott)
+- [#1186](https://github.com/request/request/pull/1186) Support Smarter Unix URL Scheme (@FredKSchott)
+- [#1178](https://github.com/request/request/pull/1178) update form documentation for new usage (@FredKSchott)
+- [#1180](https://github.com/request/request/pull/1180) Enable no-mixed-requires linting rule (@nylen)
+- [#1184](https://github.com/request/request/pull/1184) Don't forward authorization header across redirects to different hosts (@nylen)
+- [#1183](https://github.com/request/request/pull/1183) Correct README about pre and postamble CRLF using multipart and not mult... (@netpoetica)
+- [#1179](https://github.com/request/request/pull/1179) Lint tests directory (@nylen)
+- [#1169](https://github.com/request/request/pull/1169) add metadata for form-data file field (@dotcypress)
+- [#1173](https://github.com/request/request/pull/1173) remove optional dependencies (@seanstrom)
+- [#1165](https://github.com/request/request/pull/1165) Cleanup event listeners and remove function creation from init (@FredKSchott)
+- [#1174](https://github.com/request/request/pull/1174) update the request.cookie docs to have a valid cookie example (@seanstrom)
+- [#1168](https://github.com/request/request/pull/1168) create a detach helper and use detach helper in replace of nextTick (@seanstrom)
+- [#1171](https://github.com/request/request/pull/1171) in post can send form data and use callback (@MiroRadenovic)
+- [#1159](https://github.com/request/request/pull/1159) accept charset for x-www-form-urlencoded content-type (@seanstrom)
+- [#1157](https://github.com/request/request/pull/1157) Update README.md: body with json=true (@Rob--W)
+- [#1164](https://github.com/request/request/pull/1164) Disable tests/test-timeout.js on Travis (@nylen)
+- [#1153](https://github.com/request/request/pull/1153) Document how to run a single test (@nylen)
+- [#1144](https://github.com/request/request/pull/1144) adds documentation for the "response" event within the streaming section (@tbuchok)
+- [#1162](https://github.com/request/request/pull/1162) Update eslintrc file to no longer allow past errors (@FredKSchott)
+- [#1155](https://github.com/request/request/pull/1155) Support/use self everywhere (@seanstrom)
+- [#1161](https://github.com/request/request/pull/1161) fix no-use-before-define lint warnings (@emkay)
+- [#1156](https://github.com/request/request/pull/1156) adding curly brackets to get rid of lint errors (@emkay)
+- [#1151](https://github.com/request/request/pull/1151) Fix localAddress test on OS X (@nylen)
+- [#1145](https://github.com/request/request/pull/1145) documentation: fix outdated reference to setCookieSync old name in README (@FredKSchott)
+- [#1131](https://github.com/request/request/pull/1131) Update pool documentation (@FredKSchott)
+- [#1143](https://github.com/request/request/pull/1143) Rewrite all tests to use tape (@nylen)
+- [#1137](https://github.com/request/request/pull/1137) Add ability to specify querystring lib in options. (@jgrund)
+- [#1138](https://github.com/request/request/pull/1138) allow hostname and port in place of host on uri (@cappslock)
+- [#1134](https://github.com/request/request/pull/1134) Fix multiple redirects and `self.followRedirect` (@blakeembrey)
+- [#1130](https://github.com/request/request/pull/1130) documentation fix: add note about npm test for contributing (@FredKSchott)
+- [#1120](https://github.com/request/request/pull/1120) Support/refactor request setup tunnel (@seanstrom)
+- [#1129](https://github.com/request/request/pull/1129) linting fix: convert double quote strings to use single quotes (@FredKSchott)
+- [#1124](https://github.com/request/request/pull/1124) linting fix: remove unnecessary semi-colons (@FredKSchott)
+
+### v2.45.0 (2014/10/06)
+- [#1128](https://github.com/request/request/pull/1128) Add test for setCookie regression (@nylen)
+- [#1127](https://github.com/request/request/pull/1127) added tests around using objects as values in a query string (@bcoe)
+- [#1103](https://github.com/request/request/pull/1103) Support/refactor request constructor (@nylen, @seanstrom)
+- [#1119](https://github.com/request/request/pull/1119) add basic linting to request library (@FredKSchott)
+- [#1121](https://github.com/request/request/pull/1121) Revert "Explicitly use sync versions of cookie functions" (@nylen)
+- [#1118](https://github.com/request/request/pull/1118) linting fix: Restructure bad empty if statement (@FredKSchott)
+- [#1117](https://github.com/request/request/pull/1117) Fix a bad check for valid URIs (@FredKSchott)
+- [#1113](https://github.com/request/request/pull/1113) linting fix: space out operators (@FredKSchott)
+- [#1116](https://github.com/request/request/pull/1116) Fix typo in `noProxyHost` definition (@FredKSchott)
+- [#1114](https://github.com/request/request/pull/1114) linting fix: Added a `new` operator that was missing when creating and throwing a new error (@FredKSchott)
+- [#1096](https://github.com/request/request/pull/1096) No_proxy support (@samcday)
+- [#1107](https://github.com/request/request/pull/1107) linting-fix: remove unused variables (@FredKSchott)
+- [#1112](https://github.com/request/request/pull/1112) linting fix: Make return values consistent and more straightforward (@FredKSchott)
+- [#1111](https://github.com/request/request/pull/1111) linting fix: authPieces was getting redeclared (@FredKSchott)
+- [#1105](https://github.com/request/request/pull/1105) Use strict mode in request (@FredKSchott)
+- [#1110](https://github.com/request/request/pull/1110) linting fix: replace lazy '==' with more strict '===' (@FredKSchott)
+- [#1109](https://github.com/request/request/pull/1109) linting fix: remove function call from if-else conditional statement (@FredKSchott)
+- [#1102](https://github.com/request/request/pull/1102) Fix to allow setting a `requester` on recursive calls to `request.defaults` (@tikotzky)
+- [#1095](https://github.com/request/request/pull/1095) Tweaking engines in package.json (@pdehaan)
+- [#1082](https://github.com/request/request/pull/1082) Forward the socket event from the httpModule request (@seanstrom)
+- [#972](https://github.com/request/request/pull/972) Clarify gzip handling in the README (@kevinoid)
+- [#1089](https://github.com/request/request/pull/1089) Mention that encoding defaults to utf8, not Buffer (@stuartpb)
+- [#1088](https://github.com/request/request/pull/1088) Fix cookie example in README.md and make it more clear (@pipi32167)
+- [#1027](https://github.com/request/request/pull/1027) Add support for multipart form data in request options. (@crocket)
+- [#1076](https://github.com/request/request/pull/1076) use Request.abort() to abort the request when the request has timed-out (@seanstrom)
+- [#1068](https://github.com/request/request/pull/1068) add optional postamble required by .NET multipart requests (@netpoetica)
+
+### v2.43.0 (2014/09/18)
+- [#1057](https://github.com/request/request/pull/1057) Defaults should not overwrite defined options (@davidwood)
+- [#1046](https://github.com/request/request/pull/1046) Propagate datastream errors, useful in case gzip fails. (@ZJONSSON, @Janpot)
+- [#1063](https://github.com/request/request/pull/1063) copy the input headers object #1060 (@finnp)
+- [#1031](https://github.com/request/request/pull/1031) Explicitly use sync versions of cookie functions (@ZJONSSON)
+- [#1056](https://github.com/request/request/pull/1056) Fix redirects when passing url.parse(x) as URL to convenience method (@nylen)
+
+### v2.42.0 (2014/09/04)
+- [#1053](https://github.com/request/request/pull/1053) Fix #1051 Parse auth properly when using non-tunneling proxy (@isaacs)
+
+### v2.41.0 (2014/09/04)
+- [#1050](https://github.com/request/request/pull/1050) Pass whitelisted headers to tunneling proxy. Organize all tunneling logic. (@isaacs, @Feldhacker)
+- [#1035](https://github.com/request/request/pull/1035) souped up nodei.co badge (@rvagg)
+- [#1048](https://github.com/request/request/pull/1048) Aws is now possible over a proxy (@steven-aerts)
+- [#1039](https://github.com/request/request/pull/1039) extract out helper functions to a helper file (@seanstrom)
+- [#1021](https://github.com/request/request/pull/1021) Support/refactor indexjs (@seanstrom)
+- [#1033](https://github.com/request/request/pull/1033) Improve and document debug options (@nylen)
+- [#1034](https://github.com/request/request/pull/1034) Fix readme headings (@nylen)
+- [#1030](https://github.com/request/request/pull/1030) Allow recursive request.defaults (@tikotzky)
+- [#1029](https://github.com/request/request/pull/1029) Fix a couple of typos (@nylen)
+- [#675](https://github.com/request/request/pull/675) Checking for SSL fault on connection before reading SSL properties (@VRMink)
+- [#989](https://github.com/request/request/pull/989) Added allowRedirect function. Should return true if redirect is allowed or false otherwise (@doronin)
+- [#1025](https://github.com/request/request/pull/1025) [fixes #1023] Set self._ended to true once response has ended (@mridgway)
+- [#1020](https://github.com/request/request/pull/1020) Add back removed debug metadata (@FredKSchott)
+- [#1008](https://github.com/request/request/pull/1008) Moving to module instead of custom buffer concatenation. (@mikeal)
+- [#770](https://github.com/request/request/pull/770) Added dependency badge for README file; (@timgluz, @mafintosh, @lalitkapoor, @stash, @bobyrizov)
+- [#1016](https://github.com/request/request/pull/1016) toJSON no longer results in an infinite loop, returns simple objects (@FredKSchott)
+- [#1018](https://github.com/request/request/pull/1018) Remove pre-0.4.4 HTTPS fix (@mmalecki)
+- [#1006](https://github.com/request/request/pull/1006) Migrate to caseless, fixes #1001 (@mikeal)
+- [#995](https://github.com/request/request/pull/995) Fix parsing array of objects (@sjonnet19)
+- [#999](https://github.com/request/request/pull/999) Fix fallback for browserify for optional modules. (@eiriksm)
+- [#996](https://github.com/request/request/pull/996) Wrong oauth signature when multiple same param keys exist [updated] (@bengl)
+
+### v2.40.0 (2014/08/06)
+- [#992](https://github.com/request/request/pull/992) Fix security vulnerability. Update qs (@poeticninja)
+- [#988](https://github.com/request/request/pull/988) “--” -> “—” (@upisfree)
+- [#987](https://github.com/request/request/pull/987) Show optional modules as being loaded by the module that requested them (@iarna)
+
+### v2.39.0 (2014/07/24)
+- [#976](https://github.com/request/request/pull/976) Update README.md (@pvoznenko)
+
+### v2.38.0 (2014/07/22)
+- [#952](https://github.com/request/request/pull/952) Adding support to client certificate with proxy use case (@ofirshaked)
+- [#884](https://github.com/request/request/pull/884) Documented tough-cookie installation. (@wbyoung)
+- [#935](https://github.com/request/request/pull/935) Correct repository url (@fritx)
+- [#963](https://github.com/request/request/pull/963) Update changelog (@nylen)
+- [#960](https://github.com/request/request/pull/960) Support gzip with encoding on node pre-v0.9.4 (@kevinoid)
+- [#953](https://github.com/request/request/pull/953) Add async Content-Length computation when using form-data (@LoicMahieu)
+- [#844](https://github.com/request/request/pull/844) Add support for HTTP[S]_PROXY environment variables. Fixes #595. (@jvmccarthy)
+- [#946](https://github.com/request/request/pull/946) defaults: merge headers (@aj0strow)
+
+### v2.37.0 (2014/07/07)
+- [#957](https://github.com/request/request/pull/957) Silence EventEmitter memory leak warning #311 (@watson)
+- [#955](https://github.com/request/request/pull/955) check for content-length header before setting it in nextTick (@camilleanne)
+- [#951](https://github.com/request/request/pull/951) Add support for gzip content decoding (@kevinoid)
+- [#949](https://github.com/request/request/pull/949) Manually enter querystring in form option (@charlespwd)
+- [#944](https://github.com/request/request/pull/944) Make request work with browserify (@eiriksm)
+- [#943](https://github.com/request/request/pull/943) New mime module (@eiriksm)
+- [#927](https://github.com/request/request/pull/927) Bump version of hawk dep. (@samccone)
+- [#907](https://github.com/request/request/pull/907) append secureOptions to poolKey (@medovob)
+
+### v2.35.0 (2014/05/17)
+- [#901](https://github.com/request/request/pull/901) Fixes #555 (@pigulla)
+- [#897](https://github.com/request/request/pull/897) merge with default options (@vohof)
+- [#891](https://github.com/request/request/pull/891) fixes 857 - options object is mutated by calling request (@lalitkapoor)
+- [#869](https://github.com/request/request/pull/869) Pipefilter test (@tgohn)
+- [#866](https://github.com/request/request/pull/866) Fix typo (@dandv)
+- [#861](https://github.com/request/request/pull/861) Add support for RFC 6750 Bearer Tokens (@phedny)
+- [#809](https://github.com/request/request/pull/809) upgrade tunnel-proxy to 0.4.0 (@ksato9700)
+- [#850](https://github.com/request/request/pull/850) Fix word consistency in readme (@0xNobody)
+- [#810](https://github.com/request/request/pull/810) add some exposition to mpu example in README.md (@mikermcneil)
+- [#840](https://github.com/request/request/pull/840) improve error reporting for invalid protocols (@FND)
+- [#821](https://github.com/request/request/pull/821) added secureOptions back (@nw)
+- [#815](https://github.com/request/request/pull/815) Create changelog based on pull requests (@lalitkapoor)
+
+### v2.34.0 (2014/02/18)
+- [#516](https://github.com/request/request/pull/516) UNIX Socket URL Support (@lyuzashi)
+- [#801](https://github.com/request/request/pull/801) 794 ignore cookie parsing and domain errors (@lalitkapoor)
+- [#802](https://github.com/request/request/pull/802) Added the Apache license to the package.json. (@keskival)
+- [#793](https://github.com/request/request/pull/793) Adds content-length calculation when submitting forms using form-data li... (@Juul)
+- [#785](https://github.com/request/request/pull/785) Provide ability to override content-type when `json` option used (@vvo)
+- [#781](https://github.com/request/request/pull/781) simpler isReadStream function (@joaojeronimo)
+
+### v2.32.0 (2014/01/16)
+- [#767](https://github.com/request/request/pull/767) Use tough-cookie CookieJar sync API (@stash)
+- [#764](https://github.com/request/request/pull/764) Case-insensitive authentication scheme (@bobyrizov)
+- [#763](https://github.com/request/request/pull/763) Upgrade tough-cookie to 0.10.0 (@stash)
+- [#744](https://github.com/request/request/pull/744) Use Cookie.parse (@lalitkapoor)
+- [#757](https://github.com/request/request/pull/757) require aws-sign2 (@mafintosh)
+
+### v2.31.0 (2014/01/08)
+- [#645](https://github.com/request/request/pull/645) update twitter api url to v1.1 (@mick)
+- [#746](https://github.com/request/request/pull/746) README: Markdown code highlight (@weakish)
+- [#745](https://github.com/request/request/pull/745) updating setCookie example to make it clear that the callback is required (@emkay)
+- [#742](https://github.com/request/request/pull/742) Add note about JSON output body type (@iansltx)
+- [#741](https://github.com/request/request/pull/741) README example is using old cookie jar api (@emkay)
+- [#736](https://github.com/request/request/pull/736) Fix callback arguments documentation (@mmalecki)
+- [#732](https://github.com/request/request/pull/732) JSHINT: Creating global 'for' variable. Should be 'for (var ...'. (@Fritz-Lium)
+- [#730](https://github.com/request/request/pull/730) better HTTP DIGEST support (@dai-shi)
+- [#728](https://github.com/request/request/pull/728) Fix TypeError when calling request.cookie (@scarletmeow)
+- [#727](https://github.com/request/request/pull/727) fix requester bug (@jchris)
+- [#724](https://github.com/request/request/pull/724) README.md: add custom HTTP Headers example. (@tcort)
+- [#719](https://github.com/request/request/pull/719) Made a comment gender neutral. (@unsetbit)
+- [#715](https://github.com/request/request/pull/715) Request.multipart no longer crashes when header 'Content-type' present (@pastaclub)
+- [#710](https://github.com/request/request/pull/710) Fixing listing in callback part of docs. (@lukasz-zak)
+- [#696](https://github.com/request/request/pull/696) Edited README.md for formatting and clarity of phrasing (@Zearin)
+- [#694](https://github.com/request/request/pull/694) Typo in README (@VRMink)
+- [#690](https://github.com/request/request/pull/690) Handle blank password in basic auth. (@diversario)
+- [#682](https://github.com/request/request/pull/682) Optional dependencies (@Turbo87)
+- [#683](https://github.com/request/request/pull/683) Travis CI support (@Turbo87)
+- [#674](https://github.com/request/request/pull/674) change cookie module,to tough-cookie.please check it . (@sxyizhiren)
+- [#666](https://github.com/request/request/pull/666) make `ciphers` and `secureProtocol` to work in https request (@richarddong)
+- [#656](https://github.com/request/request/pull/656) Test case for #304. (@diversario)
+- [#662](https://github.com/request/request/pull/662) option.tunnel to explicitly disable tunneling (@seanmonstar)
+- [#659](https://github.com/request/request/pull/659) fix failure when running with NODE_DEBUG=request, and a test for that (@jrgm)
+- [#630](https://github.com/request/request/pull/630) Send random cnonce for HTTP Digest requests (@wprl)
+- [#619](https://github.com/request/request/pull/619) decouple things a bit (@joaojeronimo)
+- [#613](https://github.com/request/request/pull/613) Fixes #583, moved initialization of self.uri.pathname (@lexander)
+- [#605](https://github.com/request/request/pull/605) Only include ":" + pass in Basic Auth if it's defined (fixes #602) (@bendrucker)
+- [#596](https://github.com/request/request/pull/596) Global agent is being used when pool is specified (@Cauldrath)
+- [#594](https://github.com/request/request/pull/594) Emit complete event when there is no callback (@RomainLK)
+- [#601](https://github.com/request/request/pull/601) Fixed a small typo (@michalstanko)
+- [#589](https://github.com/request/request/pull/589) Prevent setting headers after they are sent (@geek)
+- [#587](https://github.com/request/request/pull/587) Global cookie jar disabled by default (@threepointone)
+- [#544](https://github.com/request/request/pull/544) Update http-signature version. (@davidlehn)
+- [#581](https://github.com/request/request/pull/581) Fix spelling of "ignoring." (@bigeasy)
+- [#568](https://github.com/request/request/pull/568) use agentOptions to create agent when specified in request (@SamPlacette)
+- [#564](https://github.com/request/request/pull/564) Fix redirections (@criloz)
+- [#541](https://github.com/request/request/pull/541) The exported request function doesn't have an auth method (@tschaub)
+- [#542](https://github.com/request/request/pull/542) Expose Request class (@regality)
+- [#536](https://github.com/request/request/pull/536) Allow explicitly empty user field for basic authentication. (@mikeando)
+- [#532](https://github.com/request/request/pull/532) fix typo (@fredericosilva)
+- [#497](https://github.com/request/request/pull/497) Added redirect event (@Cauldrath)
+- [#503](https://github.com/request/request/pull/503) Fix basic auth for passwords that contain colons (@tonistiigi)
+- [#521](https://github.com/request/request/pull/521) Improving test-localAddress.js (@noway)
+- [#529](https://github.com/request/request/pull/529) dependencies versions bump (@jodaka)
+- [#523](https://github.com/request/request/pull/523) Updating dependencies (@noway)
+- [#520](https://github.com/request/request/pull/520) Fixing test-tunnel.js (@noway)
+- [#519](https://github.com/request/request/pull/519) Update internal path state on post-creation QS changes (@jblebrun)
+- [#510](https://github.com/request/request/pull/510) Add HTTP Signature support. (@davidlehn)
+- [#502](https://github.com/request/request/pull/502) Fix POST (and probably other) requests that are retried after 401 Unauthorized (@nylen)
+- [#508](https://github.com/request/request/pull/508) Honor the .strictSSL option when using proxies (tunnel-agent) (@jhs)
+- [#512](https://github.com/request/request/pull/512) Make password optional to support the format: http://username@hostname/ (@pajato1)
+- [#513](https://github.com/request/request/pull/513) add 'localAddress' support (@yyfrankyy)
+- [#498](https://github.com/request/request/pull/498) Moving response emit above setHeaders on destination streams (@kenperkins)
+- [#490](https://github.com/request/request/pull/490) Empty response body (3-rd argument) must be passed to callback as an empty string (@Olegas)
+- [#479](https://github.com/request/request/pull/479) Changing so if Accept header is explicitly set, sending json does not ov... (@RoryH)
+- [#475](https://github.com/request/request/pull/475) Use `unescape` from `querystring` (@shimaore)
+- [#473](https://github.com/request/request/pull/473) V0.10 compat (@isaacs)
+- [#471](https://github.com/request/request/pull/471) Using querystring library from visionmedia (@kbackowski)
+- [#461](https://github.com/request/request/pull/461) Strip the UTF8 BOM from a UTF encoded response (@kppullin)
+- [#460](https://github.com/request/request/pull/460) hawk 0.10.0 (@hueniverse)
+- [#462](https://github.com/request/request/pull/462) if query params are empty, then request path shouldn't end with a '?' (merges cleanly now) (@jaipandya)
+- [#456](https://github.com/request/request/pull/456) hawk 0.9.0 (@hueniverse)
+- [#429](https://github.com/request/request/pull/429) Copy options before adding callback. (@nrn, @nfriedly, @youurayy, @jplock, @kapetan, @landeiro, @othiym23, @mmalecki)
+- [#454](https://github.com/request/request/pull/454) Destroy the response if present when destroying the request (clean merge) (@mafintosh)
+- [#310](https://github.com/request/request/pull/310) Twitter Oauth Stuff Out of Date; Now Updated (@joemccann, @isaacs, @mscdex)
+- [#413](https://github.com/request/request/pull/413) rename googledoodle.png to .jpg (@nfriedly, @youurayy, @jplock, @kapetan, @landeiro, @othiym23, @mmalecki)
+- [#448](https://github.com/request/request/pull/448) Convenience method for PATCH (@mloar)
+- [#444](https://github.com/request/request/pull/444) protect against double callbacks on error path (@spollack)
+- [#433](https://github.com/request/request/pull/433) Added support for HTTPS cert & key (@mmalecki)
+- [#430](https://github.com/request/request/pull/430) Respect specified {Host,host} headers, not just {host} (@andrewschaaf)
+- [#415](https://github.com/request/request/pull/415) Fixed a typo. (@jerem)
+- [#338](https://github.com/request/request/pull/338) Add more auth options, including digest support (@nylen)
+- [#403](https://github.com/request/request/pull/403) Optimize environment lookup to happen once only (@mmalecki)
+- [#398](https://github.com/request/request/pull/398) Add more reporting to tests (@mmalecki)
+- [#388](https://github.com/request/request/pull/388) Ensure "safe" toJSON doesn't break EventEmitters (@othiym23)
+- [#381](https://github.com/request/request/pull/381) Resolving "Invalid signature. Expected signature base string: " (@landeiro)
+- [#380](https://github.com/request/request/pull/380) Fixes missing host header on retried request when using forever agent (@mac-)
+- [#376](https://github.com/request/request/pull/376) Headers lost on redirect (@kapetan)
+- [#375](https://github.com/request/request/pull/375) Fix for missing oauth_timestamp parameter (@jplock)
+- [#374](https://github.com/request/request/pull/374) Correct Host header for proxy tunnel CONNECT (@youurayy)
+- [#370](https://github.com/request/request/pull/370) Twitter reverse auth uses x_auth_mode not x_auth_type (@drudge)
+- [#369](https://github.com/request/request/pull/369) Don't remove x_auth_mode for Twitter reverse auth (@drudge)
+- [#344](https://github.com/request/request/pull/344) Make AWS auth signing find headers correctly (@nlf)
+- [#363](https://github.com/request/request/pull/363) rfc3986 on base_uri, now passes tests (@jeffmarshall)
+- [#362](https://github.com/request/request/pull/362) Running `rfc3986` on `base_uri` in `oauth.hmacsign` instead of just `encodeURIComponent` (@jeffmarshall)
+- [#361](https://github.com/request/request/pull/361) Don't create a Content-Length header if we already have it set (@danjenkins)
+- [#360](https://github.com/request/request/pull/360) Delete self._form along with everything else on redirect (@jgautier)
+- [#355](https://github.com/request/request/pull/355) stop sending erroneous headers on redirected requests (@azylman)
+- [#332](https://github.com/request/request/pull/332) Fix #296 - Only set Content-Type if body exists (@Marsup)
+- [#343](https://github.com/request/request/pull/343) Allow AWS to work in more situations, added a note in the README on its usage (@nlf)
+- [#320](https://github.com/request/request/pull/320) request.defaults() doesn't need to wrap jar() (@StuartHarris)
+- [#322](https://github.com/request/request/pull/322) Fix + test for piped into request bumped into redirect. #321 (@alexindigo)
+- [#326](https://github.com/request/request/pull/326) Do not try to remove listener from an undefined connection (@CartoDB)
+- [#318](https://github.com/request/request/pull/318) Pass servername to tunneling secure socket creation (@isaacs)
+- [#317](https://github.com/request/request/pull/317) Workaround for #313 (@isaacs)
+- [#293](https://github.com/request/request/pull/293) Allow parser errors to bubble up to request (@mscdex)
+- [#290](https://github.com/request/request/pull/290) A test for #289 (@isaacs)
+- [#280](https://github.com/request/request/pull/280) Like in node.js print options if NODE_DEBUG contains the word request (@Filirom1)
+- [#207](https://github.com/request/request/pull/207) Fix #206 Change HTTP/HTTPS agent when redirecting between protocols (@isaacs)
+- [#214](https://github.com/request/request/pull/214) documenting additional behavior of json option (@jphaas, @vpulim)
+- [#272](https://github.com/request/request/pull/272) Boundary begins with CRLF? (@elspoono, @timshadel, @naholyr, @nanodocumet, @TehShrike)
+- [#284](https://github.com/request/request/pull/284) Remove stray `console.log()` call in multipart generator. (@bcherry)
+- [#241](https://github.com/request/request/pull/241) Composability updates suggested by issue #239 (@polotek)
+- [#282](https://github.com/request/request/pull/282) OAuth Authorization header contains non-"oauth_" parameters (@jplock)
+- [#279](https://github.com/request/request/pull/279) fix tests with boundary by injecting boundary from header (@benatkin)
+- [#273](https://github.com/request/request/pull/273) Pipe back pressure issue (@mafintosh)
+- [#268](https://github.com/request/request/pull/268) I'm not OCD seriously (@TehShrike)
+- [#263](https://github.com/request/request/pull/263) Bug in OAuth key generation for sha1 (@nanodocumet)
+- [#265](https://github.com/request/request/pull/265) uncaughtException when redirected to invalid URI (@naholyr)
+- [#262](https://github.com/request/request/pull/262) JSON test should check for equality (@timshadel)
+- [#261](https://github.com/request/request/pull/261) Setting 'pool' to 'false' does NOT disable Agent pooling (@timshadel)
+- [#249](https://github.com/request/request/pull/249) Fix for the fix of your (closed) issue #89 where self.headers[content-length] is set to 0 for all methods (@sethbridges, @polotek, @zephrax, @jeromegn)
+- [#255](https://github.com/request/request/pull/255) multipart allow body === '' ( the empty string ) (@Filirom1)
+- [#260](https://github.com/request/request/pull/260) fixed just another leak of 'i' (@sreuter)
+- [#246](https://github.com/request/request/pull/246) Fixing the set-cookie header (@jeromegn)
+- [#243](https://github.com/request/request/pull/243) Dynamic boundary (@zephrax)
+- [#240](https://github.com/request/request/pull/240) don't error when null is passed for options (@polotek)
+- [#211](https://github.com/request/request/pull/211) Replace all occurrences of special chars in RFC3986 (@chriso, @vpulim)
+- [#224](https://github.com/request/request/pull/224) Multipart content-type change (@janjongboom)
+- [#217](https://github.com/request/request/pull/217) need to use Authorization (titlecase) header with Tumblr OAuth (@visnup)
+- [#203](https://github.com/request/request/pull/203) Fix cookie and redirect bugs and add auth support for HTTPS tunnel (@vpulim)
+- [#199](https://github.com/request/request/pull/199) Tunnel (@isaacs)
+- [#198](https://github.com/request/request/pull/198) Bugfix on forever usage of util.inherits (@isaacs)
+- [#197](https://github.com/request/request/pull/197) Make ForeverAgent work with HTTPS (@isaacs)
+- [#193](https://github.com/request/request/pull/193) Fixes GH-119 (@goatslacker)
+- [#188](https://github.com/request/request/pull/188) Add abort support to the returned request (@itay)
+- [#176](https://github.com/request/request/pull/176) Querystring option (@csainty)
+- [#182](https://github.com/request/request/pull/182) Fix request.defaults to support (uri, options, callback) api (@twilson63)
+- [#180](https://github.com/request/request/pull/180) Modified the post, put, head and del shortcuts to support uri optional param (@twilson63)
+- [#179](https://github.com/request/request/pull/179) fix to add opts in .pipe(stream, opts) (@substack)
+- [#177](https://github.com/request/request/pull/177) Issue #173 Support uri as first and optional config as second argument (@twilson63)
+- [#170](https://github.com/request/request/pull/170) can't create a cookie in a wrapped request (defaults) (@fabianonunes)
+- [#168](https://github.com/request/request/pull/168) Picking off an EasyFix by adding some missing mimetypes. (@serby)
+- [#161](https://github.com/request/request/pull/161) Fix cookie jar/headers.cookie collision (#125) (@papandreou)
+- [#162](https://github.com/request/request/pull/162) Fix issue #159 (@dpetukhov)
+- [#90](https://github.com/request/request/pull/90) add option followAllRedirects to follow post/put redirects (@jroes)
+- [#148](https://github.com/request/request/pull/148) Retry Agent (@thejh)
+- [#146](https://github.com/request/request/pull/146) Multipart should respect content-type if previously set (@apeace)
+- [#144](https://github.com/request/request/pull/144) added "form" option to readme (@petejkim)
+- [#133](https://github.com/request/request/pull/133) Fixed cookies parsing (@afanasy)
+- [#135](https://github.com/request/request/pull/135) host vs hostname (@iangreenleaf)
+- [#132](https://github.com/request/request/pull/132) return the body as a Buffer when encoding is set to null (@jahewson)
+- [#112](https://github.com/request/request/pull/112) Support using a custom http-like module (@jhs)
+- [#104](https://github.com/request/request/pull/104) Cookie handling contains bugs (@janjongboom)
+- [#121](https://github.com/request/request/pull/121) Another patch for cookie handling regression (@jhurliman)
+- [#117](https://github.com/request/request/pull/117) Remove the global `i` (@3rd-Eden)
+- [#110](https://github.com/request/request/pull/110) Update to Iris Couch URL (@jhs)
+- [#86](https://github.com/request/request/pull/86) Can't post binary to multipart requests (@kkaefer)
+- [#105](https://github.com/request/request/pull/105) added test for proxy option. (@dominictarr)
+- [#102](https://github.com/request/request/pull/102) Implemented cookies - closes issue 82: https://github.com/mikeal/request/issues/82 (@alessioalex)
+- [#97](https://github.com/request/request/pull/97) Typo in previous pull causes TypeError in non-0.5.11 versions (@isaacs)
+- [#96](https://github.com/request/request/pull/96) Authless parsed url host support (@isaacs)
+- [#81](https://github.com/request/request/pull/81) Enhance redirect handling (@danmactough)
+- [#78](https://github.com/request/request/pull/78) Don't try to do strictSSL for non-ssl connections (@isaacs)
+- [#76](https://github.com/request/request/pull/76) Bug when a request fails and a timeout is set (@Marsup)
+- [#70](https://github.com/request/request/pull/70) add test script to package.json (@isaacs, @aheckmann)
+- [#73](https://github.com/request/request/pull/73) Fix #71 Respect the strictSSL flag (@isaacs)
+- [#69](https://github.com/request/request/pull/69) Flatten chunked requests properly (@isaacs)
+- [#67](https://github.com/request/request/pull/67) fixed global variable leaks (@aheckmann)
+- [#66](https://github.com/request/request/pull/66) Do not overwrite established content-type headers for read stream deliver (@voodootikigod)
+- [#53](https://github.com/request/request/pull/53) Parse json: Issue #51 (@benatkin)
+- [#45](https://github.com/request/request/pull/45) Added timeout option (@mbrevoort)
+- [#35](https://github.com/request/request/pull/35) The "end" event isn't emitted for some responses (@voxpelli)
+- [#31](https://github.com/request/request/pull/31) Error on piping a request to a destination (@tobowers)
\ No newline at end of file
diff --git a/familyark/app/node_modules/request/LICENSE b/familyark/app/node_modules/request/LICENSE
new file mode 100644
index 0000000..a4a9aee
--- /dev/null
+++ b/familyark/app/node_modules/request/LICENSE
@@ -0,0 +1,55 @@
+Apache License
+
+Version 2.0, January 2004
+
+http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
+
+"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
+
+"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
+
+"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
+
+"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
+
+"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
+
+"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
+
+"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
+
+"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
+
+"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
+
+You must give any other recipients of the Work or Derivative Works a copy of this License; and
+
+You must cause any modified files to carry prominent notices stating that You changed the files; and
+
+You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
+
+If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
\ No newline at end of file
diff --git a/familyark/app/node_modules/request/README.md b/familyark/app/node_modules/request/README.md
new file mode 100644
index 0000000..9da0eb7
--- /dev/null
+++ b/familyark/app/node_modules/request/README.md
@@ -0,0 +1,1133 @@
+# Deprecated!
+
+As of Feb 11th 2020, request is fully deprecated. No new changes are expected to land. In fact, none have landed for some time.
+
+For more information about why request is deprecated and possible alternatives refer to
+[this issue](https://github.com/request/request/issues/3142).
+
+# Request - Simplified HTTP client
+
+[npm package](https://nodei.co/npm/request/)
+
+[Build status](https://travis-ci.org/request/request)
+[Code coverage (Codecov)](https://codecov.io/github/request/request?branch=master)
+[Code coverage (Coveralls)](https://coveralls.io/r/request/request)
+[Dependency status](https://david-dm.org/request/request)
+[Known vulnerabilities (Snyk)](https://snyk.io/test/npm/request)
+[Gitter chat](https://gitter.im/request/request?utm_source=badge)
+
+
+## Super simple to use
+
+Request is designed to be the simplest way possible to make http calls. It supports HTTPS and follows redirects by default.
+
+```js
+const request = require('request');
+request('http://www.google.com', function (error, response, body) {
+ console.error('error:', error); // Print the error if one occurred
+ console.log('statusCode:', response && response.statusCode); // Print the response status code if a response was received
+ console.log('body:', body); // Print the HTML for the Google homepage.
+});
+```
+
+
+## Table of contents
+
+- [Streaming](#streaming)
+- [Promises & Async/Await](#promises--asyncawait)
+- [Forms](#forms)
+- [HTTP Authentication](#http-authentication)
+- [Custom HTTP Headers](#custom-http-headers)
+- [OAuth Signing](#oauth-signing)
+- [Proxies](#proxies)
+- [Unix Domain Sockets](#unix-domain-sockets)
+- [TLS/SSL Protocol](#tlsssl-protocol)
+- [Support for HAR 1.2](#support-for-har-12)
+- [**All Available Options**](#requestoptions-callback)
+
+Request also offers [convenience methods](#convenience-methods) like
+`request.defaults` and `request.post`, and there are
+lots of [usage examples](#examples) and several
+[debugging techniques](#debugging).
+
+
+---
+
+
+## Streaming
+
+You can stream any response to a file stream.
+
+```js
+request('http://google.com/doodle.png').pipe(fs.createWriteStream('doodle.png'))
+```
+
+You can also stream a file to a PUT or POST request. This method will also check the file extension against a mapping of file extensions to content-types (in this case `application/json`) and use the proper `content-type` in the PUT request (if the headers don’t already provide one).
+
+```js
+fs.createReadStream('file.json').pipe(request.put('http://mysite.com/obj.json'))
+```
+
+Request can also `pipe` to itself. When doing so, `content-type` and `content-length` are preserved in the PUT headers.
+
+```js
+request.get('http://google.com/img.png').pipe(request.put('http://mysite.com/img.png'))
+```
+
+Request emits a "response" event when a response is received. The `response` argument will be an instance of [http.IncomingMessage](https://nodejs.org/api/http.html#http_class_http_incomingmessage).
+
+```js
+request
+ .get('http://google.com/img.png')
+ .on('response', function(response) {
+ console.log(response.statusCode) // 200
+ console.log(response.headers['content-type']) // 'image/png'
+ })
+ .pipe(request.put('http://mysite.com/img.png'))
+```
+
+To easily handle errors when streaming requests, listen to the `error` event before piping:
+
+```js
+request
+ .get('http://mysite.com/doodle.png')
+ .on('error', function(err) {
+ console.error(err)
+ })
+ .pipe(fs.createWriteStream('doodle.png'))
+```
+
+Now let’s get fancy.
+
+```js
+http.createServer(function (req, resp) {
+ if (req.url === '/doodle.png') {
+ if (req.method === 'PUT') {
+ req.pipe(request.put('http://mysite.com/doodle.png'))
+ } else if (req.method === 'GET' || req.method === 'HEAD') {
+ request.get('http://mysite.com/doodle.png').pipe(resp)
+ }
+ }
+})
+```
+
+You can also `pipe()` from `http.ServerRequest` instances, as well as to `http.ServerResponse` instances. The HTTP method, headers, and entity-body data will be sent. This means that, if you don't really care about security, you can do:
+
+```js
+http.createServer(function (req, resp) {
+ if (req.url === '/doodle.png') {
+ const x = request('http://mysite.com/doodle.png')
+ req.pipe(x)
+ x.pipe(resp)
+ }
+})
+```
+
+And since `pipe()` returns the destination stream in Node ≥ 0.5.x, you can do one-line proxying. :)
+
+```js
+req.pipe(request('http://mysite.com/doodle.png')).pipe(resp)
+```
+
+Also, none of this new functionality conflicts with request's previous features; it just expands them.
+
+```js
+const r = request.defaults({'proxy':'http://localproxy.com'})
+
+http.createServer(function (req, resp) {
+ if (req.url === '/doodle.png') {
+ r.get('http://google.com/doodle.png').pipe(resp)
+ }
+})
+```
+
+You can still use intermediate proxies; the requests will still follow HTTP forwards, etc.
+
+[back to top](#table-of-contents)
+
+
+---
+
+
+## Promises & Async/Await
+
+`request` supports both streaming and callback interfaces natively. If you'd like `request` to return a Promise instead, you can use an alternative interface wrapper for `request`. These wrappers can be useful if you prefer to work with Promises, or if you'd like to use `async`/`await` in ES2017.
+
+Several alternative interfaces are provided by the request team, including:
+- [`request-promise`](https://github.com/request/request-promise) (uses [Bluebird](https://github.com/petkaantonov/bluebird) Promises)
+- [`request-promise-native`](https://github.com/request/request-promise-native) (uses native Promises)
+- [`request-promise-any`](https://github.com/request/request-promise-any) (uses [any-promise](https://www.npmjs.com/package/any-promise) Promises)
+
+Also, [`util.promisify`](https://nodejs.org/api/util.html#util_util_promisify_original), which is available in Node.js v8.0 and later, can be used to convert a regular function that takes a callback into one that returns a promise instead.
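+
+For example, here is a minimal sketch (the `requestAsync` name is just a local variable, not part of the request API); `util.promisify` resolves with the callback's first non-error argument, which for `request` is the response object:
+
+```js
+const util = require('util');
+const request = require('request');
+
+// Promise-returning wrapper around the callback-style request function.
+const requestAsync = util.promisify(request);
+
+async function main () {
+  const response = await requestAsync('http://www.google.com');
+  console.log('statusCode:', response.statusCode);
+  console.log('body length:', response.body.length); // the body is also available on the response
+}
+
+main().catch(console.error);
+```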
+
+
+[back to top](#table-of-contents)
+
+
+---
+
+
+## Forms
+
+`request` supports `application/x-www-form-urlencoded` and `multipart/form-data` form uploads. For `multipart/related` refer to the `multipart` API.
+
+
+#### application/x-www-form-urlencoded (URL-Encoded Forms)
+
+URL-encoded forms are simple.
+
+```js
+request.post('http://service.com/upload', {form:{key:'value'}})
+// or
+request.post('http://service.com/upload').form({key:'value'})
+// or
+request.post({url:'http://service.com/upload', form: {key:'value'}}, function(err,httpResponse,body){ /* ... */ })
+```
+
+
+#### multipart/form-data (Multipart Form Uploads)
+
+For `multipart/form-data` we use the [form-data](https://github.com/form-data/form-data) library by [@felixge](https://github.com/felixge). In most cases, you can pass your upload form data via the `formData` option.
+
+
+```js
+const formData = {
+ // Pass a simple key-value pair
+ my_field: 'my_value',
+ // Pass data via Buffers
+ my_buffer: Buffer.from([1, 2, 3]),
+ // Pass data via Streams
+ my_file: fs.createReadStream(__dirname + '/unicycle.jpg'),
+ // Pass multiple values with an Array
+ attachments: [
+ fs.createReadStream(__dirname + '/attachment1.jpg'),
+ fs.createReadStream(__dirname + '/attachment2.jpg')
+ ],
+ // Pass optional meta-data with an 'options' object with style: {value: DATA, options: OPTIONS}
+ // Use case: for some types of streams, you'll need to provide "file"-related information manually.
+ // See the `form-data` README for more information about options: https://github.com/form-data/form-data
+ custom_file: {
+ value: fs.createReadStream('/dev/urandom'),
+ options: {
+ filename: 'topsecret.jpg',
+ contentType: 'image/jpeg'
+ }
+ }
+};
+request.post({url:'http://service.com/upload', formData: formData}, function optionalCallback(err, httpResponse, body) {
+ if (err) {
+ return console.error('upload failed:', err);
+ }
+ console.log('Upload successful! Server responded with:', body);
+});
+```
+
+For advanced cases, you can access the form-data object itself via `r.form()`. This can be modified until the request is fired on the next cycle of the event-loop. (Note that calling `form()` will clear the currently set form data for that request.)
+
+```js
+// NOTE: Advanced use-case, for normal use see 'formData' usage above
+const r = request.post('http://service.com/upload', function optionalCallback(err, httpResponse, body) {...})
+const form = r.form();
+form.append('my_field', 'my_value');
+form.append('my_buffer', Buffer.from([1, 2, 3]));
+form.append('custom_file', fs.createReadStream(__dirname + '/unicycle.jpg'), {filename: 'unicycle.jpg'});
+```
+See the [form-data README](https://github.com/form-data/form-data) for more information & examples.
+
+
+#### multipart/related
+
+Some variations in different HTTP implementations require a newline/CRLF before, after, or both before and after the boundary of a `multipart/related` request (using the multipart option). This has been observed in the .NET WebAPI version 4.0. You can turn on a boundary preambleCRLF or postamble by passing them as `true` to your request options.
+
+```js
+ request({
+ method: 'PUT',
+ preambleCRLF: true,
+ postambleCRLF: true,
+ uri: 'http://service.com/upload',
+ multipart: [
+ {
+ 'content-type': 'application/json',
+ body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}})
+ },
+ { body: 'I am an attachment' },
+ { body: fs.createReadStream('image.png') }
+ ],
+ // alternatively pass an object containing additional options
+ multipart: {
+ chunked: false,
+ data: [
+ {
+ 'content-type': 'application/json',
+ body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}})
+ },
+ { body: 'I am an attachment' }
+ ]
+ }
+ },
+ function (error, response, body) {
+ if (error) {
+ return console.error('upload failed:', error);
+ }
+ console.log('Upload successful! Server responded with:', body);
+ })
+```
+
+[back to top](#table-of-contents)
+
+
+---
+
+
+## HTTP Authentication
+
+```js
+request.get('http://some.server.com/').auth('username', 'password', false);
+// or
+request.get('http://some.server.com/', {
+ 'auth': {
+ 'user': 'username',
+ 'pass': 'password',
+ 'sendImmediately': false
+ }
+});
+// or
+request.get('http://some.server.com/').auth(null, null, true, 'bearerToken');
+// or
+request.get('http://some.server.com/', {
+ 'auth': {
+ 'bearer': 'bearerToken'
+ }
+});
+```
+
+If passed as an option, `auth` should be a hash containing values:
+
+- `user` || `username`
+- `pass` || `password`
+- `sendImmediately` (optional)
+- `bearer` (optional)
+
+The method form takes parameters
+`auth(username, password, sendImmediately, bearer)`.
+
+`sendImmediately` defaults to `true`, which causes a basic or bearer
+authentication header to be sent. If `sendImmediately` is `false`, then
+`request` will retry with a proper authentication header after receiving a
+`401` response from the server (which must contain a `WWW-Authenticate` header
+indicating the required authentication method).
+
+Note that you can also specify basic authentication using the URL itself, as
+detailed in [RFC 1738](http://www.ietf.org/rfc/rfc1738.txt). Simply pass the
+`user:password` before the host with an `@` sign:
+
+```js
+const username = 'username',
+ password = 'password',
+ url = 'http://' + username + ':' + password + '@some.server.com';
+
+request({url}, function (error, response, body) {
+ // Do more stuff with 'body' here
+});
+```
+
+Digest authentication is supported, but it only works with `sendImmediately`
+set to `false`; otherwise `request` will send basic authentication on the
+initial request, which will probably cause the request to fail.
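+
+For example (a sketch using the option shape shown earlier in this section; the URL is a placeholder), the request is sent without credentials first, and the digest response is computed from the server's `401` challenge:
+
+```js
+request.get('http://some.server.com/protected', {
+  auth: {
+    user: 'username',
+    pass: 'password',
+    sendImmediately: false // wait for the 401 challenge, then answer with a Digest header
+  }
+}, function (error, response, body) {
+  // body is the protected resource once the digest handshake succeeds
+});
+```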
+
+Bearer authentication is supported, and is activated when the `bearer` value is
+available. The value may be either a `String` or a `Function` returning a
+`String`. Using a function to supply the bearer token is particularly useful if
+used in conjunction with `defaults` to allow a single function to supply the
+last known token at the time of sending a request, or to compute one on the fly.
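+
+A short sketch of that pattern (the `currentToken` variable and its refresh logic are hypothetical):
+
+```js
+let currentToken = 'initial-token'; // refreshed elsewhere in your application
+
+const api = request.defaults({
+  auth: {
+    bearer: () => currentToken // evaluated when each request is sent
+  }
+});
+
+api.get('http://some.server.com/resource', function (error, response, body) {
+  // the Authorization header carried whatever currentToken held at send time
+});
+```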
+
+[back to top](#table-of-contents)
+
+
+---
+
+
+## Custom HTTP Headers
+
+HTTP Headers, such as `User-Agent`, can be set in the `options` object.
+In the example below, we call the github API to find out the number
+of stars and forks for the request repository. This requires a
+custom `User-Agent` header as well as https.
+
+```js
+const request = require('request');
+
+const options = {
+ url: 'https://api.github.com/repos/request/request',
+ headers: {
+ 'User-Agent': 'request'
+ }
+};
+
+function callback(error, response, body) {
+ if (!error && response.statusCode == 200) {
+ const info = JSON.parse(body);
+ console.log(info.stargazers_count + " Stars");
+ console.log(info.forks_count + " Forks");
+ }
+}
+
+request(options, callback);
+```
+
+[back to top](#table-of-contents)
+
+
+---
+
+
+## OAuth Signing
+
+[OAuth version 1.0](https://tools.ietf.org/html/rfc5849) is supported. The
+default signing algorithm is
+[HMAC-SHA1](https://tools.ietf.org/html/rfc5849#section-3.4.2):
+
+```js
+// OAuth1.0 - 3-legged server side flow (Twitter example)
+// step 1
+const qs = require('querystring')
+ , oauth =
+ { callback: 'http://mysite.com/callback/'
+ , consumer_key: CONSUMER_KEY
+ , consumer_secret: CONSUMER_SECRET
+ }
+ , url = 'https://api.twitter.com/oauth/request_token'
+ ;
+request.post({url:url, oauth:oauth}, function (e, r, body) {
+ // Ideally, you would take the body in the response
+ // and construct a URL that a user clicks on (like a sign in button).
+ // The verifier is only available in the response after a user has
+ // verified with twitter that they are authorizing your app.
+
+ // step 2
+ const req_data = qs.parse(body)
+ const uri = 'https://api.twitter.com/oauth/authenticate'
+ + '?' + qs.stringify({oauth_token: req_data.oauth_token})
+ // redirect the user to the authorize uri
+
+ // step 3
+ // after the user is redirected back to your server
+ const auth_data = qs.parse(body)
+ , oauth =
+ { consumer_key: CONSUMER_KEY
+ , consumer_secret: CONSUMER_SECRET
+ , token: auth_data.oauth_token
+ , token_secret: req_data.oauth_token_secret
+ , verifier: auth_data.oauth_verifier
+ }
+ , url = 'https://api.twitter.com/oauth/access_token'
+ ;
+ request.post({url:url, oauth:oauth}, function (e, r, body) {
+ // ready to make signed requests on behalf of the user
+ const perm_data = qs.parse(body)
+ , oauth =
+ { consumer_key: CONSUMER_KEY
+ , consumer_secret: CONSUMER_SECRET
+ , token: perm_data.oauth_token
+ , token_secret: perm_data.oauth_token_secret
+ }
+ , url = 'https://api.twitter.com/1.1/users/show.json'
+ , qs =
+ { screen_name: perm_data.screen_name
+ , user_id: perm_data.user_id
+ }
+ ;
+ request.get({url:url, oauth:oauth, qs:qs, json:true}, function (e, r, user) {
+ console.log(user)
+ })
+ })
+})
+```
+
+For [RSA-SHA1 signing](https://tools.ietf.org/html/rfc5849#section-3.4.3), make
+the following changes to the OAuth options object (see the sketch after this list):
+* Pass `signature_method : 'RSA-SHA1'`
+* Instead of `consumer_secret`, specify a `private_key` string in
+ [PEM format](http://how2ssl.com/articles/working_with_pem_files/)
+
+For [PLAINTEXT signing](http://oauth.net/core/1.0/#anchor22), make
+the following changes to the OAuth options object:
+* Pass `signature_method : 'PLAINTEXT'`
+
+To send OAuth parameters via query params or in a POST body, as described in the
+[Consumer Request Parameters](http://oauth.net/core/1.0/#consumer_req_param)
+section of the OAuth 1.0 spec:
+* Pass `transport_method : 'query'` or `transport_method : 'body'` in the OAuth
+ options object.
+* `transport_method` defaults to `'header'`
+
+To use [Request Body Hash](https://oauth.googlecode.com/svn/spec/ext/body_hash/1.0/oauth-bodyhash.html) you can either
+* Manually generate the body hash and pass it as a string `body_hash: '...'`
+* Automatically generate the body hash by passing `body_hash: true`
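+
+As an illustration, these variants map onto the `oauth` options object roughly as
+follows (a sketch; `PRIVATE_KEY_PEM`, `ACCESS_TOKEN` and `ACCESS_TOKEN_SECRET` are
+placeholders in the spirit of the example above):
+
+```js
+// RSA-SHA1 signing: a PEM private key replaces the consumer secret
+const oauthRsa = {
+  consumer_key: CONSUMER_KEY,
+  private_key: PRIVATE_KEY_PEM, // PEM-format string
+  signature_method: 'RSA-SHA1',
+  token: ACCESS_TOKEN,
+  token_secret: ACCESS_TOKEN_SECRET
+};
+
+// Send the OAuth parameters in the POST body and include an automatic body hash
+// (transport_method 'body' requires a POST with a form-encoded body)
+const oauthBodySigned = {
+  consumer_key: CONSUMER_KEY,
+  consumer_secret: CONSUMER_SECRET,
+  transport_method: 'body', // or 'query'; the default is 'header'
+  body_hash: true           // generated automatically from the request body
+};
+```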
+
+[back to top](#table-of-contents)
+
+
+---
+
+
+## Proxies
+
+If you specify a `proxy` option, then the request (and any subsequent
+redirects) will be sent via a connection to the proxy server.
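+
+For example (the proxy address is a placeholder):
+
+```js
+request({
+  url: 'https://example.com/some-resource',
+  proxy: 'http://localhost:8080'
+}, function (error, response, body) {
+  // the request, and any redirects it triggers, went through the proxy
+});
+```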
+
+If your endpoint is an `https` url, and you are using a proxy, then
+request will send a `CONNECT` request to the proxy server *first*, and
+then use the supplied connection to connect to the endpoint.
+
+That is, first it will make a request like:
+
+```
+CONNECT endpoint-server.com:443 HTTP/1.1
+Host: proxy-server.com
+User-Agent: whatever user agent you specify
+```
+
+and then the proxy server will make a TCP connection to `endpoint-server`
+on port `443`, and return a response that looks like:
+
+```
+HTTP/1.1 200 OK
+```
+
+At this point, the connection is left open, and the client is
+communicating directly with the `endpoint-server.com` machine.
+
+See [the wikipedia page on HTTP Tunneling](https://en.wikipedia.org/wiki/HTTP_tunnel)
+for more information.
+
+By default, when proxying `http` traffic, request will simply make a
+standard proxied `http` request. This is done by making the `url`
+section of the initial line of the request a fully qualified url to
+the endpoint.
+
+For example, it will make a single request that looks like:
+
+```
+GET http://endpoint-server.com/some-url HTTP/1.1
+Host: proxy-server.com
+Other-Headers: all go here
+
+request body or whatever
+```
+
+Because a pure "http over http" tunnel offers no additional security
+or other features, it is generally simpler to go with a
+straightforward HTTP proxy in this case. However, if you would like
+to force a tunneling proxy, you may set the `tunnel` option to `true`.
+
+You can also make a standard proxied `http` request by explicitly setting
+`tunnel : false`, but **note that this will allow the proxy to see the traffic
+to/from the destination server**.
+
+If you are using a tunneling proxy, you may set the
+`proxyHeaderWhiteList` to share certain headers with the proxy.
+
+You can also set the `proxyHeaderExclusiveList` to share certain
+headers only with the proxy and not with destination host.
+
+By default, this set is:
+
+```
+accept
+accept-charset
+accept-encoding
+accept-language
+accept-ranges
+cache-control
+content-encoding
+content-language
+content-length
+content-location
+content-md5
+content-range
+content-type
+connection
+date
+expect
+max-forwards
+pragma
+proxy-authorization
+referer
+te
+transfer-encoding
+user-agent
+via
+```
+
+Note that, when using a tunneling proxy, the `proxy-authorization`
+header and any headers from custom `proxyHeaderExclusiveList` are
+*never* sent to the endpoint server, but only to the proxy server.
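+
+A sketch of the two options together (the header names and proxy address are
+illustrative):
+
+```js
+request({
+  url: 'https://endpoint-server.com/some-url',
+  proxy: 'http://proxy-server.com:8080',
+  // headers from this request that may be shared with the tunneling proxy
+  proxyHeaderWhiteList: ['accept', 'user-agent', 'proxy-authorization'],
+  // headers sent only to the proxy, never to the endpoint server
+  proxyHeaderExclusiveList: ['x-proxy-ticket'],
+  headers: {
+    'proxy-authorization': 'Basic ' + Buffer.from('user:pass').toString('base64'),
+    'x-proxy-ticket': 'abc123'
+  }
+});
+```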
+
+
+### Controlling proxy behaviour using environment variables
+
+The following environment variables are respected by `request`:
+
+ * `HTTP_PROXY` / `http_proxy`
+ * `HTTPS_PROXY` / `https_proxy`
+ * `NO_PROXY` / `no_proxy`
+
+When `HTTP_PROXY` / `http_proxy` are set, they will be used to proxy non-SSL requests that do not have an explicit `proxy` configuration option present. Similarly, `HTTPS_PROXY` / `https_proxy` will be respected for SSL requests that do not have an explicit `proxy` configuration option. It is valid to define a proxy in one of the environment variables, but then override it for a specific request, using the `proxy` configuration option. Furthermore, the `proxy` configuration option can be explicitly set to false / null to opt out of proxying altogether for that request.
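+
+For example, with `HTTP_PROXY` exported in the environment (URLs and the proxy
+address are placeholders):
+
+```js
+// Uses the proxy from HTTP_PROXY / http_proxy
+request('http://example.com/');
+
+// Overrides the environment for this request only
+request({ url: 'http://example.com/', proxy: 'http://other-proxy:3128' });
+
+// Opts out of proxying entirely for this request
+request({ url: 'http://example.com/', proxy: false });
+```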
+
+`request` is also aware of the `NO_PROXY`/`no_proxy` environment variables. These variables provide a granular way to opt out of proxying, on a per-host basis. They should contain a comma-separated list of hosts to exclude from proxying. It is also possible to opt out of proxying only when a particular destination port is used. Finally, the variable may be set to `*` to opt out of the implicit proxy configuration of the other environment variables.
+
+Here are some examples of valid `no_proxy` values:
+
+ * `google.com` - don't proxy HTTP/HTTPS requests to Google.
+ * `google.com:443` - don't proxy HTTPS requests to Google, but *do* proxy HTTP requests to Google.
+ * `google.com:443, yahoo.com:80` - don't proxy HTTPS requests to Google, and don't proxy HTTP requests to Yahoo!
+ * `*` - ignore `https_proxy`/`http_proxy` environment variables altogether.
+
+[back to top](#table-of-contents)
+
+
+---
+
+
+## UNIX Domain Sockets
+
+`request` supports making requests to [UNIX Domain Sockets](https://en.wikipedia.org/wiki/Unix_domain_socket). To make one, use the following URL scheme:
+
+```js
+/* Pattern */ 'http://unix:SOCKET:PATH'
+/* Example */ request.get('http://unix:/absolute/path/to/unix.socket:/request/path')
+```
+
+Note: The `SOCKET` path is an absolute path from the root of the host file system.
+
+[back to top](#table-of-contents)
+
+
+---
+
+
+## TLS/SSL Protocol
+
+TLS/SSL protocol options, such as `cert`, `key` and `passphrase`, can be
+set directly in the `options` object, in the `agentOptions` property of the `options` object, or even in `https.globalAgent.options`. Keep in mind that, although `agentOptions` allows a slightly wider range of configurations, the recommended way is to set them directly in the `options` object, because settings in `agentOptions` or `https.globalAgent.options` are not applied in the same way in proxied environments (where data travels over a TLS connection rather than through an http/https agent).
+
+```js
+const fs = require('fs')
+ , path = require('path')
+ , certFile = path.resolve(__dirname, 'ssl/client.crt')
+ , keyFile = path.resolve(__dirname, 'ssl/client.key')
+ , caFile = path.resolve(__dirname, 'ssl/ca.cert.pem')
+ , request = require('request');
+
+const options = {
+ url: 'https://api.some-server.com/',
+ cert: fs.readFileSync(certFile),
+ key: fs.readFileSync(keyFile),
+ passphrase: 'password',
+ ca: fs.readFileSync(caFile)
+};
+
+request.get(options);
+```
+
+### Using `options.agentOptions`
+
+In the example below, we call an API that requires a client-side SSL certificate
+(in PEM format) with a passphrase-protected private key (in PEM format), and we disable the SSLv3 protocol:
+
+```js
+const fs = require('fs')
+ , path = require('path')
+ , certFile = path.resolve(__dirname, 'ssl/client.crt')
+ , keyFile = path.resolve(__dirname, 'ssl/client.key')
+ , request = require('request');
+
+const options = {
+ url: 'https://api.some-server.com/',
+ agentOptions: {
+ cert: fs.readFileSync(certFile),
+ key: fs.readFileSync(keyFile),
+ // Or use `pfx` property replacing `cert` and `key` when using private key, certificate and CA certs in PFX or PKCS12 format:
+ // pfx: fs.readFileSync(pfxFilePath),
+ passphrase: 'password',
+ securityOptions: 'SSL_OP_NO_SSLv3'
+ }
+};
+
+request.get(options);
+```
+
+You can force the use of SSLv3 only by specifying `secureProtocol`:
+
+```js
+request.get({
+ url: 'https://api.some-server.com/',
+ agentOptions: {
+ secureProtocol: 'SSLv3_method'
+ }
+});
+```
+
+It is possible to accept certificates other than those signed by generally trusted Certificate Authorities (CAs).
+This can be useful, for example, when using self-signed certificates.
+To require a different root certificate, specify the signing CA by adding the contents of the CA's certificate file to `agentOptions`.
+The certificate the domain presents must be signed by that root certificate:
+
+```js
+request.get({
+ url: 'https://api.some-server.com/',
+ agentOptions: {
+ ca: fs.readFileSync('ca.cert.pem')
+ }
+});
+```
+
+The `ca` value can be an array of certificates, in the event you have a private or internal corporate public-key infrastructure hierarchy. For example, if you want to connect to https://api.some-server.com which presents a key chain consisting of:
+1. its own public key, which is signed by:
+2. an intermediate "Corp Issuing Server", that is in turn signed by:
+3. a root CA "Corp Root CA";
+
+you can configure your request as follows:
+
+```js
+request.get({
+ url: 'https://api.some-server.com/',
+ agentOptions: {
+ ca: [
+ fs.readFileSync('Corp Issuing Server.pem'),
+ fs.readFileSync('Corp Root CA.pem')
+ ]
+ }
+});
+```
+
+[back to top](#table-of-contents)
+
+
+---
+
+## Support for HAR 1.2
+
+The `options.har` property will override the values: `url`, `method`, `qs`, `headers`, `form`, `formData`, `body`, `json`, as well as construct multipart data and read files from disk when `request.postData.params[].fileName` is present without a matching `value`.
+
+A validation step will check if the HAR Request format matches the latest spec (v1.2) and will skip parsing if not matching.
+
+```js
+ const request = require('request')
+ request({
+ // will be ignored
+ method: 'GET',
+ uri: 'http://www.google.com',
+
+ // HTTP Archive Request Object
+ har: {
+ url: 'http://www.mockbin.com/har',
+ method: 'POST',
+ headers: [
+ {
+ name: 'content-type',
+ value: 'application/x-www-form-urlencoded'
+ }
+ ],
+ postData: {
+ mimeType: 'application/x-www-form-urlencoded',
+ params: [
+ {
+ name: 'foo',
+ value: 'bar'
+ },
+ {
+ name: 'hello',
+ value: 'world'
+ }
+ ]
+ }
+ }
+ })
+
+ // a POST request will be sent to http://www.mockbin.com/har
+ // with an application/x-www-form-urlencoded body:
+ // foo=bar&hello=world
+```
+
+[back to top](#table-of-contents)
+
+
+---
+
+## request(options, callback)
+
+The first argument can be either a `url` or an `options` object. The only required option is `uri`; all others are optional.
+
+- `uri` || `url` - fully qualified uri or a parsed url object from `url.parse()`
+- `baseUrl` - fully qualified uri string used as the base url. Most useful with `request.defaults`, for example when you want to do many requests to the same domain. If `baseUrl` is `https://example.com/api/`, then requesting `/end/point?test=true` will fetch `https://example.com/api/end/point?test=true`. When `baseUrl` is given, `uri` must also be a string. See the example after this list.
+- `method` - http method (default: `"GET"`)
+- `headers` - http headers (default: `{}`)
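+
+A minimal illustration of `baseUrl` combined with `request.defaults` (the host is a
+placeholder):
+
+```js
+const api = request.defaults({
+  baseUrl: 'https://example.com/api/',
+  headers: { 'User-Agent': 'request' }
+});
+
+// Resolves to https://example.com/api/end/point?test=true
+api.get('/end/point?test=true', function (error, response, body) {
+  console.log(response && response.statusCode);
+});
+```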
+
+---
+
+- `qs` - object containing querystring values to be appended to the `uri`
+- `qsParseOptions` - object containing options to pass to the [qs.parse](https://github.com/hapijs/qs#parsing-objects) method. Alternatively pass options to the [querystring.parse](https://nodejs.org/docs/v0.12.0/api/querystring.html#querystring_querystring_parse_str_sep_eq_options) method using this format `{sep:';', eq:':', options:{}}`
+- `qsStringifyOptions` - object containing options to pass to the [qs.stringify](https://github.com/hapijs/qs#stringifying) method. Alternatively pass options to the [querystring.stringify](https://nodejs.org/docs/v0.12.0/api/querystring.html#querystring_querystring_stringify_obj_sep_eq_options) method using this format `{sep:';', eq:':', options:{}}`. For example, to change the way arrays are converted to query strings using the `qs` module pass the `arrayFormat` option with one of `indices|brackets|repeat`
+- `useQuerystring` - if true, use `querystring` to stringify and parse
+ querystrings, otherwise use `qs` (default: `false`). Set this option to
+ `true` if you need arrays to be serialized as `foo=bar&foo=baz` instead of the
+ default `foo[0]=bar&foo[1]=baz`.
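+
+For example, the same array serializes differently depending on these options (URLs
+are placeholders):
+
+```js
+// foo[0]=bar&foo[1]=baz  -- default, stringified by the qs module
+request({ url: 'https://example.com/', qs: { foo: ['bar', 'baz'] } });
+
+// foo=bar&foo=baz  -- stringified by the querystring module
+request({ url: 'https://example.com/', qs: { foo: ['bar', 'baz'] }, useQuerystring: true });
+
+// foo=bar&foo=baz  -- still using qs, via a stringify option
+request({
+  url: 'https://example.com/',
+  qs: { foo: ['bar', 'baz'] },
+  qsStringifyOptions: { arrayFormat: 'repeat' }
+});
+```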
+
+---
+
+- `body` - entity body for PATCH, POST and PUT requests. Must be a `Buffer`, `String` or `ReadStream`. If `json` is `true`, then `body` must be a JSON-serializable object.
+- `form` - when passed an object or a querystring, this sets `body` to a querystring representation of value, and adds `Content-type: application/x-www-form-urlencoded` header. When passed no options, a `FormData` instance is returned (and is piped to request). See "Forms" section above.
+- `formData` - data to pass for a `multipart/form-data` request. See
+ [Forms](#forms) section above.
+- `multipart` - array of objects which contain their own headers and `body`
+ attributes. Sends a `multipart/related` request. See [Forms](#forms) section
+ above.
+ - Alternatively you can pass in an object `{chunked: false, data: []}` where
+ `chunked` is used to specify whether the request is sent in
+ [chunked transfer encoding](https://en.wikipedia.org/wiki/Chunked_transfer_encoding).
+ In non-chunked requests, data items with body streams are not allowed.
+- `preambleCRLF` - append a newline/CRLF before the boundary of your `multipart/form-data` request.
+- `postambleCRLF` - append a newline/CRLF at the end of the boundary of your `multipart/form-data` request.
+- `json` - sets `body` to a JSON representation of the value and adds a `Content-Type: application/json` header. Additionally, parses the response body as JSON. See the example after this list.
+- `jsonReviver` - a [reviver function](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/parse) that will be passed to `JSON.parse()` when parsing a JSON response body.
+- `jsonReplacer` - a [replacer function](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify) that will be passed to `JSON.stringify()` when stringifying a JSON request body.
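+
+For instance, posting JSON and receiving a parsed JSON response back (the endpoint
+is a placeholder):
+
+```js
+request.post({
+  url: 'https://example.com/api/items',
+  json: true,                       // stringifies the body and parses the response
+  body: { name: 'widget', qty: 2 }  // must be JSON-serializable when json is true
+}, function (error, response, body) {
+  // body is already a parsed object here
+  console.log(body);
+});
+```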
+
+---
+
+- `auth` - a hash containing values `user` || `username`, `pass` || `password`, and `sendImmediately` (optional). See documentation above.
+- `oauth` - options for OAuth HMAC-SHA1 signing. See documentation above.
+- `hawk` - options for [Hawk signing](https://github.com/hueniverse/hawk). The `credentials` key must contain the necessary signing info, [see hawk docs for details](https://github.com/hueniverse/hawk#usage-example).
+- `aws` - `object` containing AWS signing information. Should have the properties `key`, `secret`, and optionally `session` (note that this only works for services that require session as part of the canonical string). Also requires the property `bucket`, unless you’re specifying your `bucket` as part of the path, or the request doesn’t use a bucket (i.e. GET Services). If you want to use AWS sign version 4, use the parameter `sign_version` with value `4`; otherwise the default is version 2. If you are using SigV4, you can also include a `service` property that specifies the service name. **Note:** you need to `npm install aws4` first.
+- `httpSignature` - options for the [HTTP Signature Scheme](https://github.com/joyent/node-http-signature/blob/master/http_signing.md) using [Joyent's library](https://github.com/joyent/node-http-signature). The `keyId` and `key` properties must be specified. See the docs for other options.
+
+---
+
+- `followRedirect` - follow HTTP 3xx responses as redirects (default: `true`). This property can also be specified as a function which receives the `response` object as its single argument and should return `true` if redirects should continue or `false` otherwise.
+- `followAllRedirects` - follow non-GET HTTP 3xx responses as redirects (default: `false`)
+- `followOriginalHttpMethod` - by default we redirect to the HTTP method GET; you can enable this property to redirect to the original HTTP method instead (default: `false`)
+- `maxRedirects` - the maximum number of redirects to follow (default: `10`)
+- `removeRefererHeader` - removes the referer header when a redirect happens (default: `false`). **Note:** even if `true`, a referer header that you set in the initial request is preserved during the redirect chain.
+
+---
+
+- `encoding` - encoding to be used on `setEncoding` of response data. If `null`, the `body` is returned as a `Buffer`. Anything else **(including the default value of `undefined`)** will be passed as the [encoding](http://nodejs.org/api/buffer.html#buffer_buffer) parameter to `toString()` (meaning this is effectively `utf8` by default). (**Note:** if you expect binary data, you should set `encoding: null`.)
+- `gzip` - if `true`, add an `Accept-Encoding` header to request compressed content encodings from the server (if not already present) and decode supported content encodings in the response. **Note:** Automatic decoding of the response content is performed on the body data returned through `request` (both through the `request` stream and passed to the callback function) but is not performed on the `response` stream (available from the `response` event) which is the unmodified `http.IncomingMessage` object which may contain compressed data. See example below.
+- `jar` - if `true`, remember cookies for future use (or define your custom cookie jar; see examples section)
+
+---
+
+- `agent` - `http(s).Agent` instance to use
+- `agentClass` - alternatively specify your agent's class name
+- `agentOptions` - and pass its options. **Note:** for HTTPS see [tls API doc for TLS/SSL options](http://nodejs.org/api/tls.html#tls_tls_connect_options_callback) and the [documentation above](#using-optionsagentoptions).
+- `forever` - set to `true` to use the [forever-agent](https://github.com/request/forever-agent) **Note:** Defaults to `http(s).Agent({keepAlive:true})` in node 0.12+
+- `pool` - an object describing which agents to use for the request. If this option is omitted the request will use the global agent (as long as your options allow for it). Otherwise, request will search the pool for your custom agent. If no custom agent is found, a new agent will be created and added to the pool. **Note:** `pool` is used only when the `agent` option is not specified.
+ - A `maxSockets` property can also be provided on the `pool` object to set the max number of sockets for all agents created (ex: `pool: {maxSockets: Infinity}`).
+ - Note that if you are sending multiple requests in a loop and creating
+ multiple new `pool` objects, `maxSockets` will not work as intended. To
+ work around this, either use [`request.defaults`](#requestdefaultsoptions)
+ with your pool options or create the pool object with the `maxSockets`
+ property outside of the loop.
+- `timeout` - integer containing number of milliseconds, controls two timeouts.
+ - **Read timeout**: Time to wait for a server to send response headers (and start the response body) before aborting the request.
+ - **Connection timeout**: Sets the socket to timeout after `timeout` milliseconds of inactivity. Note that increasing the timeout beyond the OS-wide TCP connection timeout will not have any effect ([the default in Linux can be anywhere from 20-120 seconds][linux-timeout])
+
+[linux-timeout]: http://www.sekuda.com/overriding_the_default_linux_kernel_20_second_tcp_socket_connect_timeout
+
+---
+
+- `localAddress` - local interface to bind for network connections.
+- `proxy` - an HTTP proxy to be used. Supports proxy Auth with Basic Auth, identical to support for the `url` parameter (by embedding the auth info in the `uri`)
+- `strictSSL` - if `true`, requires SSL certificates be valid. **Note:** to use your own certificate authority, you need to specify an agent that was created with that CA as an option.
+- `tunnel` - controls the behavior of
+ [HTTP `CONNECT` tunneling](https://en.wikipedia.org/wiki/HTTP_tunnel#HTTP_CONNECT_tunneling)
+ as follows:
+ - `undefined` (default) - `true` if the destination is `https`, `false` otherwise
+ - `true` - always tunnel to the destination by making a `CONNECT` request to
+ the proxy
+ - `false` - request the destination as a `GET` request.
+- `proxyHeaderWhiteList` - a whitelist of headers to send to a
+ tunneling proxy.
+- `proxyHeaderExclusiveList` - a whitelist of headers to send
+ exclusively to a tunneling proxy and not to destination.
+
+---
+
+- `time` - if `true`, the request-response cycle (including all redirects) is timed at millisecond resolution. When set, the following properties are added to the response object:
+ - `elapsedTime` Duration of the entire request/response in milliseconds (*deprecated*).
+ - `responseStartTime` Timestamp when the response began (in Unix Epoch milliseconds) (*deprecated*).
+ - `timingStart` Timestamp of the start of the request (in Unix Epoch milliseconds).
+ - `timings` Contains event timestamps in millisecond resolution relative to `timingStart`. If there were redirects, the properties reflect the timings of the final request in the redirect chain:
+ - `socket` Relative timestamp when the [`http`](https://nodejs.org/api/http.html#http_event_socket) module's `socket` event fires. This happens when the socket is assigned to the request.
+ - `lookup` Relative timestamp when the [`net`](https://nodejs.org/api/net.html#net_event_lookup) module's `lookup` event fires. This happens when the DNS has been resolved.
+ - `connect`: Relative timestamp when the [`net`](https://nodejs.org/api/net.html#net_event_connect) module's `connect` event fires. This happens when the server acknowledges the TCP connection.
+ - `response`: Relative timestamp when the [`http`](https://nodejs.org/api/http.html#http_event_response) module's `response` event fires. This happens when the first bytes are received from the server.
+ - `end`: Relative timestamp when the last bytes of the response are received.
+ - `timingPhases` Contains the durations of each request phase. If there were redirects, the properties reflect the timings of the final request in the redirect chain:
+ - `wait`: Duration of socket initialization (`timings.socket`)
+ - `dns`: Duration of DNS lookup (`timings.lookup` - `timings.socket`)
+ - `tcp`: Duration of TCP connection (`timings.connect` - `timings.socket`)
+ - `firstByte`: Duration of HTTP server response (`timings.response` - `timings.connect`)
+ - `download`: Duration of HTTP download (`timings.end` - `timings.response`)
+ - `total`: Duration of the entire HTTP round-trip (`timings.end`)
+
+- `har` - a [HAR 1.2 Request Object](http://www.softwareishard.com/blog/har-12-spec/#request), will be processed from HAR format into options overwriting matching values *(see the [HAR 1.2 section](#support-for-har-12) for details)*
+- `callback` - alternatively pass the request's callback in the options object
+
+The callback argument gets 3 arguments:
+
+1. An `error` when applicable (usually from [`http.ClientRequest`](http://nodejs.org/api/http.html#http_class_http_clientrequest) object)
+2. An [`http.IncomingMessage`](https://nodejs.org/api/http.html#http_class_http_incomingmessage) object (Response object)
+3. The response body (`String` or `Buffer`, or a JSON object if the `json` option is supplied)
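+
+A minimal call tying the callback signature to the `time` option described above
+(the URL is a placeholder):
+
+```js
+request({ url: 'https://example.com/', time: true }, function (error, response, body) {
+  if (error) {
+    return console.error('request failed:', error);
+  }
+  console.log(response.statusCode);         // e.g. 200
+  console.log(response.timingPhases.total); // full round-trip duration in ms
+});
+```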
+
+[back to top](#table-of-contents)
+
+
+---
+
+## Convenience methods
+
+There are also shorthand methods for the different HTTP methods and some other conveniences.
+
+
+### request.defaults(options)
+
+This method **returns a wrapper** around the normal request API that defaults
+to whatever options you pass to it.
+
+**Note:** `request.defaults()` **does not** modify the global request API;
+instead, it **returns a wrapper** that has your default settings applied to it.
+
+**Note:** You can call `.defaults()` on the wrapper that is returned from
+`request.defaults` to add/override defaults that were previously defaulted.
+
+For example:
+```js
+//requests using baseRequest() will set the 'x-token' header
+const baseRequest = request.defaults({
+ headers: {'x-token': 'my-token'}
+})
+
+//requests using specialRequest() will include the 'x-token' header set in
+//baseRequest and will also include the 'special' header
+const specialRequest = baseRequest.defaults({
+ headers: {special: 'special value'}
+})
+```
+
+### request.METHOD()
+
+These HTTP method convenience functions act just like `request()` but with a default method already set for you:
+
+- *request.get()*: Defaults to `method: "GET"`.
+- *request.post()*: Defaults to `method: "POST"`.
+- *request.put()*: Defaults to `method: "PUT"`.
+- *request.patch()*: Defaults to `method: "PATCH"`.
+- *request.del() / request.delete()*: Defaults to `method: "DELETE"`.
+- *request.head()*: Defaults to `method: "HEAD"`.
+- *request.options()*: Defaults to `method: "OPTIONS"`.
+
+### request.cookie()
+
+Function that creates a new cookie.
+
+```js
+request.cookie('key1=value1')
+```
+### request.jar()
+
+Function that creates a new cookie jar.
+
+```js
+request.jar()
+```
+
+### response.caseless.get('header-name')
+
+Function that returns the specified response header field using a [case-insensitive match](https://tools.ietf.org/html/rfc7230#section-3.2)
+
+```js
+request('http://www.google.com', function (error, response, body) {
+ // print the Content-Type header even if the server returned it as 'content-type' (lowercase)
+ console.log('Content-Type is:', response.caseless.get('Content-Type'));
+});
+```
+
+[back to top](#table-of-contents)
+
+
+---
+
+
+## Debugging
+
+There are at least three ways to debug the operation of `request`:
+
+1. Launch the node process like `NODE_DEBUG=request node script.js`
+ (`lib,request,otherlib` works too).
+
+2. Set `require('request').debug = true` at any time (this does the same thing
+ as #1).
+
+3. Use the [request-debug module](https://github.com/request/request-debug) to
+ view request and response headers and bodies.
+
+[back to top](#table-of-contents)
+
+
+---
+
+## Timeouts
+
+Most requests to external servers should have a timeout attached, in case the
+server is not responding in a timely manner. Without a timeout, your code may
+hold a socket open and consume resources for minutes or more.
+
+There are two main types of timeouts: **connection timeouts** and **read
+timeouts**. A connect timeout occurs if the timeout is hit while your client is
+attempting to establish a connection to a remote machine (corresponding to the
+[connect() call][connect] on the socket). A read timeout occurs any time the
+server is too slow to send back a part of the response.
+
+These two situations have widely different implications for what went wrong
+with the request, so it's useful to be able to distinguish them. You can detect
+timeout errors by checking `err.code` for an 'ETIMEDOUT' value. Further, you
+can detect whether the timeout was a connection timeout by checking if the
+`err.connect` property is set to `true`.
+
+```js
+request.get('http://10.255.255.1', {timeout: 1500}, function(err) {
+ console.log(err.code === 'ETIMEDOUT');
+ // Set to `true` if the timeout was a connection timeout, `false` or
+ // `undefined` otherwise.
+ console.log(err.connect === true);
+ process.exit(0);
+});
+```
+
+[connect]: http://linux.die.net/man/2/connect
+
+## Examples
+
+```js
+ const request = require('request')
+ , rand = Math.floor(Math.random()*100000000).toString()
+ ;
+ request(
+ { method: 'PUT'
+ , uri: 'http://mikeal.iriscouch.com/testjs/' + rand
+ , multipart:
+ [ { 'content-type': 'application/json'
+ , body: JSON.stringify({foo: 'bar', _attachments: {'message.txt': {follows: true, length: 18, 'content_type': 'text/plain' }}})
+ }
+ , { body: 'I am an attachment' }
+ ]
+ }
+ , function (error, response, body) {
+ if(response.statusCode == 201){
+ console.log('document saved as: http://mikeal.iriscouch.com/testjs/'+ rand)
+ } else {
+ console.log('error: '+ response.statusCode)
+ console.log(body)
+ }
+ }
+ )
+```
+
+For backwards-compatibility, response compression is not supported by default.
+To accept gzip-compressed responses, set the `gzip` option to `true`. Note
+that the body data passed through `request` is automatically decompressed
+while the response object is unmodified and will contain compressed data if
+the server sent a compressed response.
+
+```js
+ const request = require('request')
+ request(
+ { method: 'GET'
+ , uri: 'http://www.google.com'
+ , gzip: true
+ }
+ , function (error, response, body) {
+ // body is the decompressed response body
+ console.log('server encoded the data as: ' + (response.headers['content-encoding'] || 'identity'))
+ console.log('the decoded data is: ' + body)
+ }
+ )
+ .on('data', function(data) {
+ // decompressed data as it is received
+ console.log('decoded chunk: ' + data)
+ })
+ .on('response', function(response) {
+ // unmodified http.IncomingMessage object
+ response.on('data', function(data) {
+ // compressed data as it is received
+ console.log('received ' + data.length + ' bytes of compressed data')
+ })
+ })
+```
+
+Cookies are disabled by default (otherwise, they would be used in subsequent requests). To enable cookies, set `jar` to `true` (either in `defaults` or `options`).
+
+```js
+const requestWithJar = request.defaults({jar: true})
+requestWithJar('http://www.google.com', function () {
+  requestWithJar('http://images.google.com')
+})
+```
+
+To use a custom cookie jar (instead of `request`’s global cookie jar), set `jar` to an instance of `request.jar()` (either in `defaults` or `options`)
+
+```js
+const j = request.jar()
+const requestWithJar = request.defaults({jar: j})
+requestWithJar('http://www.google.com', function () {
+  requestWithJar('http://images.google.com')
+})
+```
+
+OR
+
+```js
+const j = request.jar();
+const cookie = request.cookie('key1=value1');
+const url = 'http://www.google.com';
+j.setCookie(cookie, url);
+request({url: url, jar: j}, function () {
+ request('http://images.google.com')
+})
+```
+
+To use a custom cookie store (such as a
+[`FileCookieStore`](https://github.com/mitsuru/tough-cookie-filestore)
+which supports saving to and restoring from JSON files), pass it as a parameter
+to `request.jar()`:
+
+```js
+const FileCookieStore = require('tough-cookie-filestore');
+// NOTE - currently the 'cookies.json' file must already exist!
+const j = request.jar(new FileCookieStore('cookies.json'));
+const requestWithStore = request.defaults({ jar: j })
+requestWithStore('http://www.google.com', function() {
+  requestWithStore('http://images.google.com')
+})
+```
+
+The cookie store must be a
+[`tough-cookie`](https://github.com/SalesforceEng/tough-cookie)
+store and it must support synchronous operations; see the
+[`CookieStore` API docs](https://github.com/SalesforceEng/tough-cookie#api)
+for details.
+
+To inspect your cookie jar after a request:
+
+```js
+const j = request.jar()
+const url = 'http://www.google.com'
+request({url: url, jar: j}, function () {
+  const cookie_string = j.getCookieString(url); // "key1=value1; key2=value2; ..."
+  const cookies = j.getCookies(url);
+  // [{key: 'key1', value: 'value1', domain: "www.google.com", ...}, ...]
+})
+```
+
+[back to top](#table-of-contents)
diff --git a/familyark/app/node_modules/request/index.js b/familyark/app/node_modules/request/index.js
new file mode 100755
index 0000000..d50f991
--- /dev/null
+++ b/familyark/app/node_modules/request/index.js
@@ -0,0 +1,155 @@
+// Copyright 2010-2012 Mikeal Rogers
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+'use strict'
+
+var extend = require('extend')
+var cookies = require('./lib/cookies')
+var helpers = require('./lib/helpers')
+
+var paramsHaveRequestBody = helpers.paramsHaveRequestBody
+
+// organize params for patch, post, put, head, del
+function initParams (uri, options, callback) {
+ if (typeof options === 'function') {
+ callback = options
+ }
+
+ var params = {}
+ if (options !== null && typeof options === 'object') {
+ extend(params, options, {uri: uri})
+ } else if (typeof uri === 'string') {
+ extend(params, {uri: uri})
+ } else {
+ extend(params, uri)
+ }
+
+ params.callback = callback || params.callback
+ return params
+}
+
+function request (uri, options, callback) {
+ if (typeof uri === 'undefined') {
+ throw new Error('undefined is not a valid uri or options object.')
+ }
+
+ var params = initParams(uri, options, callback)
+
+ if (params.method === 'HEAD' && paramsHaveRequestBody(params)) {
+ throw new Error('HTTP HEAD requests MUST NOT include a request body.')
+ }
+
+ return new request.Request(params)
+}
+
+function verbFunc (verb) {
+ var method = verb.toUpperCase()
+ return function (uri, options, callback) {
+ var params = initParams(uri, options, callback)
+ params.method = method
+ return request(params, params.callback)
+ }
+}
+
+// define like this to please codeintel/intellisense IDEs
+request.get = verbFunc('get')
+request.head = verbFunc('head')
+request.options = verbFunc('options')
+request.post = verbFunc('post')
+request.put = verbFunc('put')
+request.patch = verbFunc('patch')
+request.del = verbFunc('delete')
+request['delete'] = verbFunc('delete')
+
+request.jar = function (store) {
+ return cookies.jar(store)
+}
+
+request.cookie = function (str) {
+ return cookies.parse(str)
+}
+
+function wrapRequestMethod (method, options, requester, verb) {
+ return function (uri, opts, callback) {
+ var params = initParams(uri, opts, callback)
+
+ var target = {}
+ extend(true, target, options, params)
+
+ target.pool = params.pool || options.pool
+
+ if (verb) {
+ target.method = verb.toUpperCase()
+ }
+
+ if (typeof requester === 'function') {
+ method = requester
+ }
+
+ return method(target, target.callback)
+ }
+}
+
+request.defaults = function (options, requester) {
+ var self = this
+
+ options = options || {}
+
+ if (typeof options === 'function') {
+ requester = options
+ options = {}
+ }
+
+ var defaults = wrapRequestMethod(self, options, requester)
+
+ var verbs = ['get', 'head', 'post', 'put', 'patch', 'del', 'delete']
+ verbs.forEach(function (verb) {
+ defaults[verb] = wrapRequestMethod(self[verb], options, requester, verb)
+ })
+
+ defaults.cookie = wrapRequestMethod(self.cookie, options, requester)
+ defaults.jar = self.jar
+ defaults.defaults = self.defaults
+ return defaults
+}
+
+request.forever = function (agentOptions, optionsArg) {
+ var options = {}
+ if (optionsArg) {
+ extend(options, optionsArg)
+ }
+ if (agentOptions) {
+ options.agentOptions = agentOptions
+ }
+
+ options.forever = true
+ return request.defaults(options)
+}
+
+// Exports
+
+module.exports = request
+request.Request = require('./request')
+request.initParams = initParams
+
+// Backwards compatibility for request.debug
+Object.defineProperty(request, 'debug', {
+ enumerable: true,
+ get: function () {
+ return request.Request.debug
+ },
+ set: function (debug) {
+ request.Request.debug = debug
+ }
+})
diff --git a/familyark/app/node_modules/request/lib/auth.js b/familyark/app/node_modules/request/lib/auth.js
new file mode 100644
index 0000000..02f2038
--- /dev/null
+++ b/familyark/app/node_modules/request/lib/auth.js
@@ -0,0 +1,167 @@
+'use strict'
+
+var caseless = require('caseless')
+var uuid = require('uuid/v4')
+var helpers = require('./helpers')
+
+var md5 = helpers.md5
+var toBase64 = helpers.toBase64
+
+function Auth (request) {
+ // define all public properties here
+ this.request = request
+ this.hasAuth = false
+ this.sentAuth = false
+ this.bearerToken = null
+ this.user = null
+ this.pass = null
+}
+
+Auth.prototype.basic = function (user, pass, sendImmediately) {
+ var self = this
+ if (typeof user !== 'string' || (pass !== undefined && typeof pass !== 'string')) {
+ self.request.emit('error', new Error('auth() received invalid user or password'))
+ }
+ self.user = user
+ self.pass = pass
+ self.hasAuth = true
+ var header = user + ':' + (pass || '')
+ if (sendImmediately || typeof sendImmediately === 'undefined') {
+ var authHeader = 'Basic ' + toBase64(header)
+ self.sentAuth = true
+ return authHeader
+ }
+}
+
+Auth.prototype.bearer = function (bearer, sendImmediately) {
+ var self = this
+ self.bearerToken = bearer
+ self.hasAuth = true
+ if (sendImmediately || typeof sendImmediately === 'undefined') {
+ if (typeof bearer === 'function') {
+ bearer = bearer()
+ }
+ var authHeader = 'Bearer ' + (bearer || '')
+ self.sentAuth = true
+ return authHeader
+ }
+}
+
+Auth.prototype.digest = function (method, path, authHeader) {
+ // TODO: More complete implementation of RFC 2617.
+ // - handle challenge.domain
+ // - support qop="auth-int" only
+ // - handle Authentication-Info (not necessarily?)
+ // - check challenge.stale (not necessarily?)
+ // - increase nc (not necessarily?)
+ // For reference:
+ // http://tools.ietf.org/html/rfc2617#section-3
+ // https://github.com/bagder/curl/blob/master/lib/http_digest.c
+
+ var self = this
+
+ var challenge = {}
+ var re = /([a-z0-9_-]+)=(?:"([^"]+)"|([a-z0-9_-]+))/gi
+ while (true) {
+ var match = re.exec(authHeader)
+ if (!match) {
+ break
+ }
+ challenge[match[1]] = match[2] || match[3]
+ }
+
+ /**
+ * RFC 2617: handle both MD5 and MD5-sess algorithms.
+ *
+ * If the algorithm directive's value is "MD5" or unspecified, then HA1 is
+ * HA1=MD5(username:realm:password)
+ * If the algorithm directive's value is "MD5-sess", then HA1 is
+ * HA1=MD5(MD5(username:realm:password):nonce:cnonce)
+ */
+ var ha1Compute = function (algorithm, user, realm, pass, nonce, cnonce) {
+ var ha1 = md5(user + ':' + realm + ':' + pass)
+ if (algorithm && algorithm.toLowerCase() === 'md5-sess') {
+ return md5(ha1 + ':' + nonce + ':' + cnonce)
+ } else {
+ return ha1
+ }
+ }
+
+ var qop = /(^|,)\s*auth\s*($|,)/.test(challenge.qop) && 'auth'
+ var nc = qop && '00000001'
+ var cnonce = qop && uuid().replace(/-/g, '')
+ var ha1 = ha1Compute(challenge.algorithm, self.user, challenge.realm, self.pass, challenge.nonce, cnonce)
+ var ha2 = md5(method + ':' + path)
+ var digestResponse = qop
+ ? md5(ha1 + ':' + challenge.nonce + ':' + nc + ':' + cnonce + ':' + qop + ':' + ha2)
+ : md5(ha1 + ':' + challenge.nonce + ':' + ha2)
+ var authValues = {
+ username: self.user,
+ realm: challenge.realm,
+ nonce: challenge.nonce,
+ uri: path,
+ qop: qop,
+ response: digestResponse,
+ nc: nc,
+ cnonce: cnonce,
+ algorithm: challenge.algorithm,
+ opaque: challenge.opaque
+ }
+
+ authHeader = []
+ for (var k in authValues) {
+ if (authValues[k]) {
+ if (k === 'qop' || k === 'nc' || k === 'algorithm') {
+ authHeader.push(k + '=' + authValues[k])
+ } else {
+ authHeader.push(k + '="' + authValues[k] + '"')
+ }
+ }
+ }
+ authHeader = 'Digest ' + authHeader.join(', ')
+ self.sentAuth = true
+ return authHeader
+}
+
+Auth.prototype.onRequest = function (user, pass, sendImmediately, bearer) {
+ var self = this
+ var request = self.request
+
+ var authHeader
+ if (bearer === undefined && user === undefined) {
+ self.request.emit('error', new Error('no auth mechanism defined'))
+ } else if (bearer !== undefined) {
+ authHeader = self.bearer(bearer, sendImmediately)
+ } else {
+ authHeader = self.basic(user, pass, sendImmediately)
+ }
+ if (authHeader) {
+ request.setHeader('authorization', authHeader)
+ }
+}
+
+Auth.prototype.onResponse = function (response) {
+ var self = this
+ var request = self.request
+
+ if (!self.hasAuth || self.sentAuth) { return null }
+
+ var c = caseless(response.headers)
+
+ var authHeader = c.get('www-authenticate')
+ var authVerb = authHeader && authHeader.split(' ')[0].toLowerCase()
+ request.debug('reauth', authVerb)
+
+ switch (authVerb) {
+ case 'basic':
+ return self.basic(self.user, self.pass, true)
+
+ case 'bearer':
+ return self.bearer(self.bearerToken, true)
+
+ case 'digest':
+ return self.digest(request.method, request.path, authHeader)
+ }
+}
+
+exports.Auth = Auth
diff --git a/familyark/app/node_modules/request/lib/cookies.js b/familyark/app/node_modules/request/lib/cookies.js
new file mode 100644
index 0000000..bd5d46b
--- /dev/null
+++ b/familyark/app/node_modules/request/lib/cookies.js
@@ -0,0 +1,38 @@
+'use strict'
+
+var tough = require('tough-cookie')
+
+var Cookie = tough.Cookie
+var CookieJar = tough.CookieJar
+
+exports.parse = function (str) {
+ if (str && str.uri) {
+ str = str.uri
+ }
+ if (typeof str !== 'string') {
+ throw new Error('The cookie function only accepts STRING as param')
+ }
+ return Cookie.parse(str, {loose: true})
+}
+
+// Adapt the sometimes-Async api of tough.CookieJar to our requirements
+function RequestJar (store) {
+ var self = this
+ self._jar = new CookieJar(store, {looseMode: true})
+}
+RequestJar.prototype.setCookie = function (cookieOrStr, uri, options) {
+ var self = this
+ return self._jar.setCookieSync(cookieOrStr, uri, options || {})
+}
+RequestJar.prototype.getCookieString = function (uri) {
+ var self = this
+ return self._jar.getCookieStringSync(uri)
+}
+RequestJar.prototype.getCookies = function (uri) {
+ var self = this
+ return self._jar.getCookiesSync(uri)
+}
+
+exports.jar = function (store) {
+ return new RequestJar(store)
+}
diff --git a/familyark/app/node_modules/request/lib/getProxyFromURI.js b/familyark/app/node_modules/request/lib/getProxyFromURI.js
new file mode 100644
index 0000000..0b9b18e
--- /dev/null
+++ b/familyark/app/node_modules/request/lib/getProxyFromURI.js
@@ -0,0 +1,79 @@
+'use strict'
+
+function formatHostname (hostname) {
+ // canonicalize the hostname, so that 'oogle.com' won't match 'google.com'
+ return hostname.replace(/^\.*/, '.').toLowerCase()
+}
+
+function parseNoProxyZone (zone) {
+ zone = zone.trim().toLowerCase()
+
+ var zoneParts = zone.split(':', 2)
+ var zoneHost = formatHostname(zoneParts[0])
+ var zonePort = zoneParts[1]
+ var hasPort = zone.indexOf(':') > -1
+
+ return {hostname: zoneHost, port: zonePort, hasPort: hasPort}
+}
+
+function uriInNoProxy (uri, noProxy) {
+ var port = uri.port || (uri.protocol === 'https:' ? '443' : '80')
+ var hostname = formatHostname(uri.hostname)
+ var noProxyList = noProxy.split(',')
+
+ // iterate through the noProxyList until it finds a match.
+ return noProxyList.map(parseNoProxyZone).some(function (noProxyZone) {
+ var isMatchedAt = hostname.indexOf(noProxyZone.hostname)
+ var hostnameMatched = (
+ isMatchedAt > -1 &&
+ (isMatchedAt === hostname.length - noProxyZone.hostname.length)
+ )
+
+ if (noProxyZone.hasPort) {
+ return (port === noProxyZone.port) && hostnameMatched
+ }
+
+ return hostnameMatched
+ })
+}
+
+function getProxyFromURI (uri) {
+ // Decide the proper request proxy to use based on the request URI object and the
+ // environmental variables (NO_PROXY, HTTP_PROXY, etc.)
+ // respect NO_PROXY environment variables (see: https://lynx.invisible-island.net/lynx2.8.7/breakout/lynx_help/keystrokes/environments.html)
+
+ var noProxy = process.env.NO_PROXY || process.env.no_proxy || ''
+
+ // if the noProxy is a wildcard then return null
+
+ if (noProxy === '*') {
+ return null
+ }
+
+ // if the noProxy is not empty and the uri is found return null
+
+ if (noProxy !== '' && uriInNoProxy(uri, noProxy)) {
+ return null
+ }
+
+ // Check for HTTP or HTTPS Proxy in environment Else default to null
+
+ if (uri.protocol === 'http:') {
+ return process.env.HTTP_PROXY ||
+ process.env.http_proxy || null
+ }
+
+ if (uri.protocol === 'https:') {
+ return process.env.HTTPS_PROXY ||
+ process.env.https_proxy ||
+ process.env.HTTP_PROXY ||
+ process.env.http_proxy || null
+ }
+
+ // if none of that works, return null
+ // (What uri protocol are you using then?)
+
+ return null
+}
+
+module.exports = getProxyFromURI
diff --git a/familyark/app/node_modules/request/lib/har.js b/familyark/app/node_modules/request/lib/har.js
new file mode 100644
index 0000000..0dedee4
--- /dev/null
+++ b/familyark/app/node_modules/request/lib/har.js
@@ -0,0 +1,205 @@
+'use strict'
+
+var fs = require('fs')
+var qs = require('querystring')
+var validate = require('har-validator')
+var extend = require('extend')
+
+function Har (request) {
+ this.request = request
+}
+
+Har.prototype.reducer = function (obj, pair) {
+ // new property ?
+ if (obj[pair.name] === undefined) {
+ obj[pair.name] = pair.value
+ return obj
+ }
+
+ // existing? convert to array
+ var arr = [
+ obj[pair.name],
+ pair.value
+ ]
+
+ obj[pair.name] = arr
+
+ return obj
+}
+
+Har.prototype.prep = function (data) {
+ // construct utility properties
+ data.queryObj = {}
+ data.headersObj = {}
+ data.postData.jsonObj = false
+ data.postData.paramsObj = false
+
+ // construct query objects
+ if (data.queryString && data.queryString.length) {
+ data.queryObj = data.queryString.reduce(this.reducer, {})
+ }
+
+ // construct headers objects
+ if (data.headers && data.headers.length) {
+ // collapse the headers array into an object keyed by header name
+ data.headersObj = data.headers.reduceRight(function (headers, header) {
+ headers[header.name] = header.value
+ return headers
+ }, {})
+ }
+
+ // construct Cookie header
+ if (data.cookies && data.cookies.length) {
+ var cookies = data.cookies.map(function (cookie) {
+ return cookie.name + '=' + cookie.value
+ })
+
+ if (cookies.length) {
+ data.headersObj.cookie = cookies.join('; ')
+ }
+ }
+
+ // prep body
+ function some (arr) {
+ return arr.some(function (type) {
+ return data.postData.mimeType.indexOf(type) === 0
+ })
+ }
+
+ if (some([
+ 'multipart/mixed',
+ 'multipart/related',
+ 'multipart/form-data',
+ 'multipart/alternative'])) {
+ // reset values
+ data.postData.mimeType = 'multipart/form-data'
+ } else if (some([
+ 'application/x-www-form-urlencoded'])) {
+ if (!data.postData.params) {
+ data.postData.text = ''
+ } else {
+ data.postData.paramsObj = data.postData.params.reduce(this.reducer, {})
+
+ // always overwrite
+ data.postData.text = qs.stringify(data.postData.paramsObj)
+ }
+ } else if (some([
+ 'text/json',
+ 'text/x-json',
+ 'application/json',
+ 'application/x-json'])) {
+ data.postData.mimeType = 'application/json'
+
+ if (data.postData.text) {
+ try {
+ data.postData.jsonObj = JSON.parse(data.postData.text)
+ } catch (e) {
+ this.request.debug(e)
+
+ // force back to text/plain
+ data.postData.mimeType = 'text/plain'
+ }
+ }
+ }
+
+ return data
+}
+
+Har.prototype.options = function (options) {
+ // skip if no har property defined
+ if (!options.har) {
+ return options
+ }
+
+ var har = {}
+ extend(har, options.har)
+
+ // only process the first entry
+ if (har.log && har.log.entries) {
+ har = har.log.entries[0]
+ }
+
+ // add optional properties to make validation successful
+ har.url = har.url || options.url || options.uri || options.baseUrl || '/'
+ har.httpVersion = har.httpVersion || 'HTTP/1.1'
+ har.queryString = har.queryString || []
+ har.headers = har.headers || []
+ har.cookies = har.cookies || []
+ har.postData = har.postData || {}
+ har.postData.mimeType = har.postData.mimeType || 'application/octet-stream'
+
+ har.bodySize = 0
+ har.headersSize = 0
+ har.postData.size = 0
+
+ if (!validate.request(har)) {
+ return options
+ }
+
+ // clean up and get some utility properties
+ var req = this.prep(har)
+
+ // construct new options
+ if (req.url) {
+ options.url = req.url
+ }
+
+ if (req.method) {
+ options.method = req.method
+ }
+
+ if (Object.keys(req.queryObj).length) {
+ options.qs = req.queryObj
+ }
+
+ if (Object.keys(req.headersObj).length) {
+ options.headers = req.headersObj
+ }
+
+ function test (type) {
+ return req.postData.mimeType.indexOf(type) === 0
+ }
+ if (test('application/x-www-form-urlencoded')) {
+ options.form = req.postData.paramsObj
+ } else if (test('application/json')) {
+ if (req.postData.jsonObj) {
+ options.body = req.postData.jsonObj
+ options.json = true
+ }
+ } else if (test('multipart/form-data')) {
+ options.formData = {}
+
+ req.postData.params.forEach(function (param) {
+ var attachment = {}
+
+ if (!param.fileName && !param.contentType) {
+ options.formData[param.name] = param.value
+ return
+ }
+
+ // attempt to read from disk!
+ if (param.fileName && !param.value) {
+ attachment.value = fs.createReadStream(param.fileName)
+ } else if (param.value) {
+ attachment.value = param.value
+ }
+
+ if (param.fileName) {
+ attachment.options = {
+ filename: param.fileName,
+ contentType: param.contentType ? param.contentType : null
+ }
+ }
+
+ options.formData[param.name] = attachment
+ })
+ } else {
+ if (req.postData.text) {
+ options.body = req.postData.text
+ }
+ }
+
+ return options
+}
+
+exports.Har = Har
diff --git a/familyark/app/node_modules/request/lib/hawk.js b/familyark/app/node_modules/request/lib/hawk.js
new file mode 100644
index 0000000..de48a98
--- /dev/null
+++ b/familyark/app/node_modules/request/lib/hawk.js
@@ -0,0 +1,89 @@
+'use strict'
+
+var crypto = require('crypto')
+
+function randomString (size) {
+ var bits = (size + 1) * 6
+ var buffer = crypto.randomBytes(Math.ceil(bits / 8))
+ var string = buffer.toString('base64').replace(/\+/g, '-').replace(/\//g, '_').replace(/=/g, '')
+ return string.slice(0, size)
+}
+
+function calculatePayloadHash (payload, algorithm, contentType) {
+ var hash = crypto.createHash(algorithm)
+ hash.update('hawk.1.payload\n')
+ hash.update((contentType ? contentType.split(';')[0].trim().toLowerCase() : '') + '\n')
+ hash.update(payload || '')
+ hash.update('\n')
+ return hash.digest('base64')
+}
+
+exports.calculateMac = function (credentials, opts) {
+ var normalized = 'hawk.1.header\n' +
+ opts.ts + '\n' +
+ opts.nonce + '\n' +
+ (opts.method || '').toUpperCase() + '\n' +
+ opts.resource + '\n' +
+ opts.host.toLowerCase() + '\n' +
+ opts.port + '\n' +
+ (opts.hash || '') + '\n'
+
+ if (opts.ext) {
+ normalized = normalized + opts.ext.replace('\\', '\\\\').replace('\n', '\\n')
+ }
+
+ normalized = normalized + '\n'
+
+ if (opts.app) {
+ normalized = normalized + opts.app + '\n' + (opts.dlg || '') + '\n'
+ }
+
+ var hmac = crypto.createHmac(credentials.algorithm, credentials.key).update(normalized)
+ var digest = hmac.digest('base64')
+ return digest
+}
+
+exports.header = function (uri, method, opts) {
+ var timestamp = opts.timestamp || Math.floor((Date.now() + (opts.localtimeOffsetMsec || 0)) / 1000)
+ var credentials = opts.credentials
+ if (!credentials || !credentials.id || !credentials.key || !credentials.algorithm) {
+ return ''
+ }
+
+ if (['sha1', 'sha256'].indexOf(credentials.algorithm) === -1) {
+ return ''
+ }
+
+ var artifacts = {
+ ts: timestamp,
+ nonce: opts.nonce || randomString(6),
+ method: method,
+ resource: uri.pathname + (uri.search || ''),
+ host: uri.hostname,
+ port: uri.port || (uri.protocol === 'http:' ? 80 : 443),
+ hash: opts.hash,
+ ext: opts.ext,
+ app: opts.app,
+ dlg: opts.dlg
+ }
+
+ if (!artifacts.hash && (opts.payload || opts.payload === '')) {
+ artifacts.hash = calculatePayloadHash(opts.payload, credentials.algorithm, opts.contentType)
+ }
+
+ var mac = exports.calculateMac(credentials, artifacts)
+
+ var hasExt = artifacts.ext !== null && artifacts.ext !== undefined && artifacts.ext !== ''
+ var header = 'Hawk id="' + credentials.id +
+ '", ts="' + artifacts.ts +
+ '", nonce="' + artifacts.nonce +
+ (artifacts.hash ? '", hash="' + artifacts.hash : '') +
+ (hasExt ? '", ext="' + artifacts.ext.replace(/\\/g, '\\\\').replace(/"/g, '\\"') : '') +
+ '", mac="' + mac + '"'
+
+ if (artifacts.app) {
+ header = header + ', app="' + artifacts.app + (artifacts.dlg ? '", dlg="' + artifacts.dlg : '') + '"'
+ }
+
+ return header
+}
diff --git a/familyark/app/node_modules/request/lib/helpers.js b/familyark/app/node_modules/request/lib/helpers.js
new file mode 100644
index 0000000..8b2a7e6
--- /dev/null
+++ b/familyark/app/node_modules/request/lib/helpers.js
@@ -0,0 +1,66 @@
+'use strict'
+
+var jsonSafeStringify = require('json-stringify-safe')
+var crypto = require('crypto')
+var Buffer = require('safe-buffer').Buffer
+
+var defer = typeof setImmediate === 'undefined'
+ ? process.nextTick
+ : setImmediate
+
+function paramsHaveRequestBody (params) {
+ return (
+ params.body ||
+ params.requestBodyStream ||
+ (params.json && typeof params.json !== 'boolean') ||
+ params.multipart
+ )
+}
+
+function safeStringify (obj, replacer) {
+ var ret
+ try {
+ ret = JSON.stringify(obj, replacer)
+ } catch (e) {
+ ret = jsonSafeStringify(obj, replacer)
+ }
+ return ret
+}
+
+function md5 (str) {
+ return crypto.createHash('md5').update(str).digest('hex')
+}
+
+function isReadStream (rs) {
+ return rs.readable && rs.path && rs.mode
+}
+
+function toBase64 (str) {
+ return Buffer.from(str || '', 'utf8').toString('base64')
+}
+
+function copy (obj) {
+ var o = {}
+ Object.keys(obj).forEach(function (i) {
+ o[i] = obj[i]
+ })
+ return o
+}
+
+function version () {
+ var numbers = process.version.replace('v', '').split('.')
+ return {
+ major: parseInt(numbers[0], 10),
+ minor: parseInt(numbers[1], 10),
+ patch: parseInt(numbers[2], 10)
+ }
+}
+
+exports.paramsHaveRequestBody = paramsHaveRequestBody
+exports.safeStringify = safeStringify
+exports.md5 = md5
+exports.isReadStream = isReadStream
+exports.toBase64 = toBase64
+exports.copy = copy
+exports.version = version
+exports.defer = defer
diff --git a/familyark/app/node_modules/request/lib/multipart.js b/familyark/app/node_modules/request/lib/multipart.js
new file mode 100644
index 0000000..6a009bc
--- /dev/null
+++ b/familyark/app/node_modules/request/lib/multipart.js
@@ -0,0 +1,112 @@
+'use strict'
+
+var uuid = require('uuid/v4')
+var CombinedStream = require('combined-stream')
+var isstream = require('isstream')
+var Buffer = require('safe-buffer').Buffer
+
+function Multipart (request) {
+ this.request = request
+ this.boundary = uuid()
+ this.chunked = false
+ this.body = null
+}
+
+Multipart.prototype.isChunked = function (options) {
+ var self = this
+ var chunked = false
+ var parts = options.data || options
+
+ if (!parts.forEach) {
+ self.request.emit('error', new Error('Argument error, options.multipart.'))
+ }
+
+ if (options.chunked !== undefined) {
+ chunked = options.chunked
+ }
+
+ if (self.request.getHeader('transfer-encoding') === 'chunked') {
+ chunked = true
+ }
+
+ if (!chunked) {
+ parts.forEach(function (part) {
+ if (typeof part.body === 'undefined') {
+ self.request.emit('error', new Error('Body attribute missing in multipart.'))
+ }
+ if (isstream(part.body)) {
+ chunked = true
+ }
+ })
+ }
+
+ return chunked
+}
+
+Multipart.prototype.setHeaders = function (chunked) {
+ var self = this
+
+ if (chunked && !self.request.hasHeader('transfer-encoding')) {
+ self.request.setHeader('transfer-encoding', 'chunked')
+ }
+
+ var header = self.request.getHeader('content-type')
+
+ if (!header || header.indexOf('multipart') === -1) {
+ self.request.setHeader('content-type', 'multipart/related; boundary=' + self.boundary)
+ } else {
+ if (header.indexOf('boundary') !== -1) {
+ self.boundary = header.replace(/.*boundary=([^\s;]+).*/, '$1')
+ } else {
+ self.request.setHeader('content-type', header + '; boundary=' + self.boundary)
+ }
+ }
+}
+
+Multipart.prototype.build = function (parts, chunked) {
+ var self = this
+ var body = chunked ? new CombinedStream() : []
+
+ function add (part) {
+ if (typeof part === 'number') {
+ part = part.toString()
+ }
+ return chunked ? body.append(part) : body.push(Buffer.from(part))
+ }
+
+ if (self.request.preambleCRLF) {
+ add('\r\n')
+ }
+
+ parts.forEach(function (part) {
+ var preamble = '--' + self.boundary + '\r\n'
+ Object.keys(part).forEach(function (key) {
+ if (key === 'body') { return }
+ preamble += key + ': ' + part[key] + '\r\n'
+ })
+ preamble += '\r\n'
+ add(preamble)
+ add(part.body)
+ add('\r\n')
+ })
+ add('--' + self.boundary + '--')
+
+ if (self.request.postambleCRLF) {
+ add('\r\n')
+ }
+
+ return body
+}
+
+Multipart.prototype.onRequest = function (options) {
+ var self = this
+
+ var chunked = self.isChunked(options)
+ var parts = options.data || options
+
+ self.setHeaders(chunked)
+ self.chunked = chunked
+ self.body = self.build(parts, chunked)
+}
+
+exports.Multipart = Multipart
diff --git a/familyark/app/node_modules/request/lib/oauth.js b/familyark/app/node_modules/request/lib/oauth.js
new file mode 100644
index 0000000..96de72b
--- /dev/null
+++ b/familyark/app/node_modules/request/lib/oauth.js
@@ -0,0 +1,148 @@
+'use strict'
+
+var url = require('url')
+var qs = require('qs')
+var caseless = require('caseless')
+var uuid = require('uuid/v4')
+var oauth = require('oauth-sign')
+var crypto = require('crypto')
+var Buffer = require('safe-buffer').Buffer
+
+function OAuth (request) {
+ this.request = request
+ this.params = null
+}
+
+OAuth.prototype.buildParams = function (_oauth, uri, method, query, form, qsLib) {
+ var oa = {}
+ for (var i in _oauth) {
+ oa['oauth_' + i] = _oauth[i]
+ }
+ if (!oa.oauth_version) {
+ oa.oauth_version = '1.0'
+ }
+ if (!oa.oauth_timestamp) {
+ oa.oauth_timestamp = Math.floor(Date.now() / 1000).toString()
+ }
+ if (!oa.oauth_nonce) {
+ oa.oauth_nonce = uuid().replace(/-/g, '')
+ }
+ if (!oa.oauth_signature_method) {
+ oa.oauth_signature_method = 'HMAC-SHA1'
+ }
+
+ var consumer_secret_or_private_key = oa.oauth_consumer_secret || oa.oauth_private_key // eslint-disable-line camelcase
+ delete oa.oauth_consumer_secret
+ delete oa.oauth_private_key
+
+ var token_secret = oa.oauth_token_secret // eslint-disable-line camelcase
+ delete oa.oauth_token_secret
+
+ var realm = oa.oauth_realm
+ delete oa.oauth_realm
+ delete oa.oauth_transport_method
+
+ var baseurl = uri.protocol + '//' + uri.host + uri.pathname
+ var params = qsLib.parse([].concat(query, form, qsLib.stringify(oa)).join('&'))
+
+ oa.oauth_signature = oauth.sign(
+ oa.oauth_signature_method,
+ method,
+ baseurl,
+ params,
+ consumer_secret_or_private_key, // eslint-disable-line camelcase
+ token_secret // eslint-disable-line camelcase
+ )
+
+ if (realm) {
+ oa.realm = realm
+ }
+
+ return oa
+}
+
+OAuth.prototype.buildBodyHash = function (_oauth, body) {
+ if (['HMAC-SHA1', 'RSA-SHA1'].indexOf(_oauth.signature_method || 'HMAC-SHA1') < 0) {
+ this.request.emit('error', new Error('oauth: ' + _oauth.signature_method +
+ ' signature_method not supported with body_hash signing.'))
+ }
+
+ var shasum = crypto.createHash('sha1')
+ shasum.update(body || '')
+ var sha1 = shasum.digest('hex')
+
+ return Buffer.from(sha1, 'hex').toString('base64')
+}
+
+OAuth.prototype.concatParams = function (oa, sep, wrap) {
+ wrap = wrap || ''
+
+ var params = Object.keys(oa).filter(function (i) {
+ return i !== 'realm' && i !== 'oauth_signature'
+ }).sort()
+
+ if (oa.realm) {
+ params.splice(0, 0, 'realm')
+ }
+ params.push('oauth_signature')
+
+ return params.map(function (i) {
+ return i + '=' + wrap + oauth.rfc3986(oa[i]) + wrap
+ }).join(sep)
+}
+
+OAuth.prototype.onRequest = function (_oauth) {
+ var self = this
+ self.params = _oauth
+
+ var uri = self.request.uri || {}
+ var method = self.request.method || ''
+ var headers = caseless(self.request.headers)
+ var body = self.request.body || ''
+ var qsLib = self.request.qsLib || qs
+
+ var form
+ var query
+ var contentType = headers.get('content-type') || ''
+ var formContentType = 'application/x-www-form-urlencoded'
+ var transport = _oauth.transport_method || 'header'
+
+ if (contentType.slice(0, formContentType.length) === formContentType) {
+ contentType = formContentType
+ form = body
+ }
+ if (uri.query) {
+ query = uri.query
+ }
+ if (transport === 'body' && (method !== 'POST' || contentType !== formContentType)) {
+ self.request.emit('error', new Error('oauth: transport_method of body requires POST ' +
+ 'and content-type ' + formContentType))
+ }
+
+ if (!form && typeof _oauth.body_hash === 'boolean') {
+ _oauth.body_hash = self.buildBodyHash(_oauth, self.request.body.toString())
+ }
+
+ var oa = self.buildParams(_oauth, uri, method, query, form, qsLib)
+
+ switch (transport) {
+ case 'header':
+ self.request.setHeader('Authorization', 'OAuth ' + self.concatParams(oa, ',', '"'))
+ break
+
+ case 'query':
+ var href = self.request.uri.href += (query ? '&' : '?') + self.concatParams(oa, '&')
+ self.request.uri = url.parse(href)
+ self.request.path = self.request.uri.path
+ break
+
+ case 'body':
+ self.request.body = (form ? form + '&' : '') + self.concatParams(oa, '&')
+ break
+
+ default:
+ self.request.emit('error', new Error('oauth: transport_method invalid'))
+ }
+}
+
+exports.OAuth = OAuth
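
In normal use the OAuth helper is fed through the documented `oauth` option on a request; a sketch of that call, with placeholder credentials and endpoint:

    var request = require('request')

    request.get({
      url: 'https://api.example.com/resource',   // placeholder endpoint
      oauth: {
        consumer_key: 'CONSUMER_KEY',
        consumer_secret: 'CONSUMER_SECRET',
        token: 'ACCESS_TOKEN',
        token_secret: 'ACCESS_TOKEN_SECRET'
        // transport_method defaults to 'header', so onRequest above emits the
        // signed parameters as an "Authorization: OAuth ..." header.
      }
    }, function (err, res, body) {
      // handle response
    })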
diff --git a/familyark/app/node_modules/request/lib/querystring.js b/familyark/app/node_modules/request/lib/querystring.js
new file mode 100644
index 0000000..4a32cd1
--- /dev/null
+++ b/familyark/app/node_modules/request/lib/querystring.js
@@ -0,0 +1,50 @@
+'use strict'
+
+var qs = require('qs')
+var querystring = require('querystring')
+
+function Querystring (request) {
+ this.request = request
+ this.lib = null
+ this.useQuerystring = null
+ this.parseOptions = null
+ this.stringifyOptions = null
+}
+
+Querystring.prototype.init = function (options) {
+ if (this.lib) { return }
+
+ this.useQuerystring = options.useQuerystring
+ this.lib = (this.useQuerystring ? querystring : qs)
+
+ this.parseOptions = options.qsParseOptions || {}
+ this.stringifyOptions = options.qsStringifyOptions || {}
+}
+
+Querystring.prototype.stringify = function (obj) {
+ return (this.useQuerystring)
+ ? this.rfc3986(this.lib.stringify(obj,
+ this.stringifyOptions.sep || null,
+ this.stringifyOptions.eq || null,
+ this.stringifyOptions))
+ : this.lib.stringify(obj, this.stringifyOptions)
+}
+
+Querystring.prototype.parse = function (str) {
+ return (this.useQuerystring)
+ ? this.lib.parse(str,
+ this.parseOptions.sep || null,
+ this.parseOptions.eq || null,
+ this.parseOptions)
+ : this.lib.parse(str, this.parseOptions)
+}
+
+Querystring.prototype.rfc3986 = function (str) {
+ return str.replace(/[!'()*]/g, function (c) {
+ return '%' + c.charCodeAt(0).toString(16).toUpperCase()
+ })
+}
+
+Querystring.prototype.unescape = querystring.unescape
+
+exports.Querystring = Querystring
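
The Querystring wrapper simply switches between `qs` and Node's `querystring` and carries the configured parse/stringify options. A small sketch (the request argument is stored but unused by these methods):

    var Querystring = require('./lib/querystring').Querystring

    var q = new Querystring(null)
    q.init({ useQuerystring: false, qsStringifyOptions: { arrayFormat: 'brackets' } })

    q.stringify({ a: ['x', 'y'] })   // delegates to qs.stringify with the configured options
    q.parse('a[0]=x&a[1]=y')         // → { a: ['x', 'y'] } via qs.parse
    q.rfc3986("it's")                // → "it%27s"; percent-encodes !, ', (, ), *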
diff --git a/familyark/app/node_modules/request/lib/redirect.js b/familyark/app/node_modules/request/lib/redirect.js
new file mode 100644
index 0000000..b9150e7
--- /dev/null
+++ b/familyark/app/node_modules/request/lib/redirect.js
@@ -0,0 +1,154 @@
+'use strict'
+
+var url = require('url')
+var isUrl = /^https?:/
+
+function Redirect (request) {
+ this.request = request
+ this.followRedirect = true
+ this.followRedirects = true
+ this.followAllRedirects = false
+ this.followOriginalHttpMethod = false
+ this.allowRedirect = function () { return true }
+ this.maxRedirects = 10
+ this.redirects = []
+ this.redirectsFollowed = 0
+ this.removeRefererHeader = false
+}
+
+Redirect.prototype.onRequest = function (options) {
+ var self = this
+
+ if (options.maxRedirects !== undefined) {
+ self.maxRedirects = options.maxRedirects
+ }
+ if (typeof options.followRedirect === 'function') {
+ self.allowRedirect = options.followRedirect
+ }
+ if (options.followRedirect !== undefined) {
+ self.followRedirects = !!options.followRedirect
+ }
+ if (options.followAllRedirects !== undefined) {
+ self.followAllRedirects = options.followAllRedirects
+ }
+ if (self.followRedirects || self.followAllRedirects) {
+ self.redirects = self.redirects || []
+ }
+ if (options.removeRefererHeader !== undefined) {
+ self.removeRefererHeader = options.removeRefererHeader
+ }
+ if (options.followOriginalHttpMethod !== undefined) {
+ self.followOriginalHttpMethod = options.followOriginalHttpMethod
+ }
+}
+
+Redirect.prototype.redirectTo = function (response) {
+ var self = this
+ var request = self.request
+
+ var redirectTo = null
+ if (response.statusCode >= 300 && response.statusCode < 400 && response.caseless.has('location')) {
+ var location = response.caseless.get('location')
+ request.debug('redirect', location)
+
+ if (self.followAllRedirects) {
+ redirectTo = location
+ } else if (self.followRedirects) {
+ switch (request.method) {
+ case 'PATCH':
+ case 'PUT':
+ case 'POST':
+ case 'DELETE':
+ // Do not follow redirects
+ break
+ default:
+ redirectTo = location
+ break
+ }
+ }
+ } else if (response.statusCode === 401) {
+ var authHeader = request._auth.onResponse(response)
+ if (authHeader) {
+ request.setHeader('authorization', authHeader)
+ redirectTo = request.uri
+ }
+ }
+ return redirectTo
+}
+
+Redirect.prototype.onResponse = function (response) {
+ var self = this
+ var request = self.request
+
+ var redirectTo = self.redirectTo(response)
+ if (!redirectTo || !self.allowRedirect.call(request, response)) {
+ return false
+ }
+
+ request.debug('redirect to', redirectTo)
+
+ // ignore any potential response body. it cannot possibly be useful
+ // to us at this point.
+ // response.resume should be defined, but check anyway before calling. Workaround for browserify.
+ if (response.resume) {
+ response.resume()
+ }
+
+ if (self.redirectsFollowed >= self.maxRedirects) {
+ request.emit('error', new Error('Exceeded maxRedirects. Probably stuck in a redirect loop ' + request.uri.href))
+ return false
+ }
+ self.redirectsFollowed += 1
+
+ if (!isUrl.test(redirectTo)) {
+ redirectTo = url.resolve(request.uri.href, redirectTo)
+ }
+
+ var uriPrev = request.uri
+ request.uri = url.parse(redirectTo)
+
+ // handle the case where we change protocol from https to http or vice versa
+ if (request.uri.protocol !== uriPrev.protocol) {
+ delete request.agent
+ }
+
+ self.redirects.push({ statusCode: response.statusCode, redirectUri: redirectTo })
+
+ if (self.followAllRedirects && request.method !== 'HEAD' &&
+ response.statusCode !== 401 && response.statusCode !== 307) {
+ request.method = self.followOriginalHttpMethod ? request.method : 'GET'
+ }
+ // request.method = 'GET' // Force all redirects to use GET || commented out fixes #215
+ delete request.src
+ delete request.req
+ delete request._started
+ if (response.statusCode !== 401 && response.statusCode !== 307) {
+ // Remove parameters from the previous response, unless this is the second request
+ // for a server that requires digest authentication.
+ delete request.body
+ delete request._form
+ if (request.headers) {
+ request.removeHeader('host')
+ request.removeHeader('content-type')
+ request.removeHeader('content-length')
+ if (request.uri.hostname !== request.originalHost.split(':')[0]) {
+ // Remove authorization if changing hostnames (but not if just
+ // changing ports or protocols). This matches the behavior of curl:
+ // https://github.com/bagder/curl/blob/6beb0eee/lib/http.c#L710
+ request.removeHeader('authorization')
+ }
+ }
+ }
+
+ if (!self.removeRefererHeader) {
+ request.setHeader('referer', uriPrev.href)
+ }
+
+ request.emit('redirect')
+
+ request.init()
+
+ return true
+}
+
+exports.Redirect = Redirect
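
Redirect.onRequest reads its settings from the documented request options; `followRedirect` may be a boolean or a per-response predicate, as sketched below (the URL is a placeholder):

    var request = require('request')

    request.get({
      url: 'http://example.com/old-path',
      maxRedirects: 5,
      removeRefererHeader: true,
      followRedirect: function (response) {
        // custom allowRedirect hook: skip "303 See Other" hops, follow the rest
        return response.statusCode !== 303
      }
    }, function (err, res) {
      // after any hops, res.request.uri holds the final location
    })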
diff --git a/familyark/app/node_modules/request/lib/tunnel.js b/familyark/app/node_modules/request/lib/tunnel.js
new file mode 100644
index 0000000..4479003
--- /dev/null
+++ b/familyark/app/node_modules/request/lib/tunnel.js
@@ -0,0 +1,175 @@
+'use strict'
+
+var url = require('url')
+var tunnel = require('tunnel-agent')
+
+var defaultProxyHeaderWhiteList = [
+ 'accept',
+ 'accept-charset',
+ 'accept-encoding',
+ 'accept-language',
+ 'accept-ranges',
+ 'cache-control',
+ 'content-encoding',
+ 'content-language',
+ 'content-location',
+ 'content-md5',
+ 'content-range',
+ 'content-type',
+ 'connection',
+ 'date',
+ 'expect',
+ 'max-forwards',
+ 'pragma',
+ 'referer',
+ 'te',
+ 'user-agent',
+ 'via'
+]
+
+var defaultProxyHeaderExclusiveList = [
+ 'proxy-authorization'
+]
+
+function constructProxyHost (uriObject) {
+ var port = uriObject.port
+ var protocol = uriObject.protocol
+ var proxyHost = uriObject.hostname + ':'
+
+ if (port) {
+ proxyHost += port
+ } else if (protocol === 'https:') {
+ proxyHost += '443'
+ } else {
+ proxyHost += '80'
+ }
+
+ return proxyHost
+}
+
+function constructProxyHeaderWhiteList (headers, proxyHeaderWhiteList) {
+ var whiteList = proxyHeaderWhiteList
+ .reduce(function (set, header) {
+ set[header.toLowerCase()] = true
+ return set
+ }, {})
+
+ return Object.keys(headers)
+ .filter(function (header) {
+ return whiteList[header.toLowerCase()]
+ })
+ .reduce(function (set, header) {
+ set[header] = headers[header]
+ return set
+ }, {})
+}
+
+function constructTunnelOptions (request, proxyHeaders) {
+ var proxy = request.proxy
+
+ var tunnelOptions = {
+ proxy: {
+ host: proxy.hostname,
+ port: +proxy.port,
+ proxyAuth: proxy.auth,
+ headers: proxyHeaders
+ },
+ headers: request.headers,
+ ca: request.ca,
+ cert: request.cert,
+ key: request.key,
+ passphrase: request.passphrase,
+ pfx: request.pfx,
+ ciphers: request.ciphers,
+ rejectUnauthorized: request.rejectUnauthorized,
+ secureOptions: request.secureOptions,
+ secureProtocol: request.secureProtocol
+ }
+
+ return tunnelOptions
+}
+
+function constructTunnelFnName (uri, proxy) {
+ var uriProtocol = (uri.protocol === 'https:' ? 'https' : 'http')
+ var proxyProtocol = (proxy.protocol === 'https:' ? 'Https' : 'Http')
+ return [uriProtocol, proxyProtocol].join('Over')
+}
+
+function getTunnelFn (request) {
+ var uri = request.uri
+ var proxy = request.proxy
+ var tunnelFnName = constructTunnelFnName(uri, proxy)
+ return tunnel[tunnelFnName]
+}
+
+function Tunnel (request) {
+ this.request = request
+ this.proxyHeaderWhiteList = defaultProxyHeaderWhiteList
+ this.proxyHeaderExclusiveList = []
+ if (typeof request.tunnel !== 'undefined') {
+ this.tunnelOverride = request.tunnel
+ }
+}
+
+Tunnel.prototype.isEnabled = function () {
+ var self = this
+ var request = self.request
+ // Tunnel HTTPS by default. Allow the user to override this setting.
+
+ // If self.tunnelOverride is set (the user specified a value), use it.
+ if (typeof self.tunnelOverride !== 'undefined') {
+ return self.tunnelOverride
+ }
+
+ // If the destination is HTTPS, tunnel.
+ if (request.uri.protocol === 'https:') {
+ return true
+ }
+
+ // Otherwise, do not use tunnel.
+ return false
+}
+
+Tunnel.prototype.setup = function (options) {
+ var self = this
+ var request = self.request
+
+ options = options || {}
+
+ if (typeof request.proxy === 'string') {
+ request.proxy = url.parse(request.proxy)
+ }
+
+ if (!request.proxy || !request.tunnel) {
+ return false
+ }
+
+ // Setup Proxy Header Exclusive List and White List
+ if (options.proxyHeaderWhiteList) {
+ self.proxyHeaderWhiteList = options.proxyHeaderWhiteList
+ }
+ if (options.proxyHeaderExclusiveList) {
+ self.proxyHeaderExclusiveList = options.proxyHeaderExclusiveList
+ }
+
+ var proxyHeaderExclusiveList = self.proxyHeaderExclusiveList.concat(defaultProxyHeaderExclusiveList)
+ var proxyHeaderWhiteList = self.proxyHeaderWhiteList.concat(proxyHeaderExclusiveList)
+
+ // Setup Proxy Headers and Proxy Headers Host
+ // Only send the Proxy White Listed Header names
+ var proxyHeaders = constructProxyHeaderWhiteList(request.headers, proxyHeaderWhiteList)
+ proxyHeaders.host = constructProxyHost(request.uri)
+
+ proxyHeaderExclusiveList.forEach(request.removeHeader, request)
+
+ // Set Agent from Tunnel Data
+ var tunnelFn = getTunnelFn(request)
+ var tunnelOptions = constructTunnelOptions(request, proxyHeaders)
+ request.agent = tunnelFn(tunnelOptions)
+
+ return true
+}
+
+Tunnel.defaultProxyHeaderWhiteList = defaultProxyHeaderWhiteList
+Tunnel.defaultProxyHeaderExclusiveList = defaultProxyHeaderExclusiveList
+exports.Tunnel = Tunnel
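
For an https destination reached through an http proxy, setup() resolves getTunnelFn to tunnel.httpsOverHttp and builds options roughly like the sketch below (hosts and port are placeholders):

    var tunnel = require('tunnel-agent')

    var agent = tunnel.httpsOverHttp({
      proxy: {
        host: 'proxy.internal',                    // request.proxy.hostname
        port: 3128,
        proxyAuth: null,
        headers: { host: 'api.example.com:443' }   // whitelisted headers + constructProxyHost(request.uri)
      },
      headers: { /* request.headers, minus the proxy-exclusive ones */ },
      rejectUnauthorized: true
    })
    // request.agent = agent — requests then CONNECT through the proxy.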
diff --git a/familyark/app/node_modules/request/node_modules/qs/.editorconfig b/familyark/app/node_modules/request/node_modules/qs/.editorconfig
new file mode 100644
index 0000000..b2654e7
--- /dev/null
+++ b/familyark/app/node_modules/request/node_modules/qs/.editorconfig
@@ -0,0 +1,30 @@
+root = true
+
+[*]
+indent_style = space
+indent_size = 4
+end_of_line = lf
+charset = utf-8
+trim_trailing_whitespace = true
+insert_final_newline = true
+max_line_length = 140
+
+[test/*]
+max_line_length = off
+
+[*.md]
+max_line_length = off
+
+[*.json]
+max_line_length = off
+
+[Makefile]
+max_line_length = off
+
+[CHANGELOG.md]
+indent_style = space
+indent_size = 2
+
+[LICENSE]
+indent_size = 2
+max_line_length = off
diff --git a/familyark/app/node_modules/request/node_modules/qs/.eslintignore b/familyark/app/node_modules/request/node_modules/qs/.eslintignore
new file mode 100644
index 0000000..1521c8b
--- /dev/null
+++ b/familyark/app/node_modules/request/node_modules/qs/.eslintignore
@@ -0,0 +1 @@
+dist
diff --git a/familyark/app/node_modules/request/node_modules/qs/.eslintrc b/familyark/app/node_modules/request/node_modules/qs/.eslintrc
new file mode 100644
index 0000000..b7a87b9
--- /dev/null
+++ b/familyark/app/node_modules/request/node_modules/qs/.eslintrc
@@ -0,0 +1,19 @@
+{
+ "root": true,
+
+ "extends": "@ljharb",
+
+ "rules": {
+ "complexity": 0,
+ "consistent-return": 1,
+ "func-name-matching": 0,
+ "id-length": [2, { "min": 1, "max": 25, "properties": "never" }],
+ "indent": [2, 4],
+ "max-params": [2, 12],
+ "max-statements": [2, 45],
+ "no-continue": 1,
+ "no-magic-numbers": 0,
+ "no-restricted-syntax": [2, "BreakStatement", "DebuggerStatement", "ForInStatement", "LabeledStatement", "WithStatement"],
+ "operator-linebreak": [2, "before"],
+ }
+}
diff --git a/familyark/app/node_modules/request/node_modules/qs/CHANGELOG.md b/familyark/app/node_modules/request/node_modules/qs/CHANGELOG.md
new file mode 100644
index 0000000..fe52320
--- /dev/null
+++ b/familyark/app/node_modules/request/node_modules/qs/CHANGELOG.md
@@ -0,0 +1,226 @@
+## **6.5.2**
+- [Fix] use `safer-buffer` instead of `Buffer` constructor
+- [Refactor] utils: `module.exports` one thing, instead of mutating `exports` (#230)
+- [Dev Deps] update `browserify`, `eslint`, `iconv-lite`, `safer-buffer`, `tape`
+
+## **6.5.1**
+- [Fix] Fix parsing & compacting very deep objects (#224)
+- [Refactor] name utils functions
+- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `tape`
+- [Tests] up to `node` `v8.4`; use `nvm install-latest-npm` so newer npm doesn’t break older node
+- [Tests] Use precise dist for Node.js 0.6 runtime (#225)
+- [Tests] make 0.6 required, now that it’s passing
+- [Tests] on `node` `v8.2`; fix npm on node 0.6
+
+## **6.5.0**
+- [New] add `utils.assign`
+- [New] pass default encoder/decoder to custom encoder/decoder functions (#206)
+- [New] `parse`/`stringify`: add `ignoreQueryPrefix`/`addQueryPrefix` options, respectively (#213)
+- [Fix] Handle stringifying empty objects with addQueryPrefix (#217)
+- [Fix] do not mutate `options` argument (#207)
+- [Refactor] `parse`: cache index to reuse in else statement (#182)
+- [Docs] add various badges to readme (#208)
+- [Dev Deps] update `eslint`, `browserify`, `iconv-lite`, `tape`
+- [Tests] up to `node` `v8.1`, `v7.10`, `v6.11`; npm v4.6 breaks on node < v1; npm v5+ breaks on node < v4
+- [Tests] add `editorconfig-tools`
+
+## **6.4.0**
+- [New] `qs.stringify`: add `encodeValuesOnly` option
+- [Fix] follow `allowPrototypes` option during merge (#201, #200)
+- [Fix] support keys starting with brackets (#202, #200)
+- [Fix] chmod a-x
+- [Dev Deps] update `eslint`
+- [Tests] up to `node` `v7.7`, `v6.10`, `v4.8`; disable osx builds since they block linux builds
+- [eslint] reduce warnings
+
+## **6.3.2**
+- [Fix] follow `allowPrototypes` option during merge (#201, #200)
+- [Dev Deps] update `eslint`
+- [Fix] chmod a-x
+- [Fix] support keys starting with brackets (#202, #200)
+- [Tests] up to `node` `v7.7`, `v6.10`, `v4.8`; disable osx builds since they block linux builds
+
+## **6.3.1**
+- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties (thanks, @snyk!)
+- [Dev Deps] update `eslint`, `@ljharb/eslint-config`, `browserify`, `iconv-lite`, `qs-iconv`, `tape`
+- [Tests] on all node minors; improve test matrix
+- [Docs] document stringify option `allowDots` (#195)
+- [Docs] add empty object and array values example (#195)
+- [Docs] Fix minor inconsistency/typo (#192)
+- [Docs] document stringify option `sort` (#191)
+- [Refactor] `stringify`: throw faster with an invalid encoder
+- [Refactor] remove unnecessary escapes (#184)
+- Remove contributing.md, since `qs` is no longer part of `hapi` (#183)
+
+## **6.3.0**
+- [New] Add support for RFC 1738 (#174, #173)
+- [New] `stringify`: Add `serializeDate` option to customize Date serialization (#159)
+- [Fix] ensure `utils.merge` handles merging two arrays
+- [Refactor] only constructors should be capitalized
+- [Refactor] capitalized var names are for constructors only
+- [Refactor] avoid using a sparse array
+- [Robustness] `formats`: cache `String#replace`
+- [Dev Deps] update `browserify`, `eslint`, `@ljharb/eslint-config`; add `safe-publish-latest`
+- [Tests] up to `node` `v6.8`, `v4.6`; improve test matrix
+- [Tests] flesh out arrayLimit/arrayFormat tests (#107)
+- [Tests] skip Object.create tests when null objects are not available
+- [Tests] Turn on eslint for test files (#175)
+
+## **6.2.3**
+- [Fix] follow `allowPrototypes` option during merge (#201, #200)
+- [Fix] chmod a-x
+- [Fix] support keys starting with brackets (#202, #200)
+- [Tests] up to `node` `v7.7`, `v6.10`, `v4.8`; disable osx builds since they block linux builds
+
+## **6.2.2**
+- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties
+
+## **6.2.1**
+- [Fix] ensure `key[]=x&key[]&key[]=y` results in 3, not 2, values
+- [Refactor] Be explicit and use `Object.prototype.hasOwnProperty.call`
+- [Tests] remove `parallelshell` since it does not reliably report failures
+- [Tests] up to `node` `v6.3`, `v5.12`
+- [Dev Deps] update `tape`, `eslint`, `@ljharb/eslint-config`, `qs-iconv`
+
+## [**6.2.0**](https://github.com/ljharb/qs/issues?milestone=36&state=closed)
+- [New] pass Buffers to the encoder/decoder directly (#161)
+- [New] add "encoder" and "decoder" options, for custom param encoding/decoding (#160)
+- [Fix] fix compacting of nested sparse arrays (#150)
+
+## **6.1.2**
+- [Fix] follow `allowPrototypes` option during merge (#201, #200)
+- [Fix] chmod a-x
+- [Fix] support keys starting with brackets (#202, #200)
+- [Tests] up to `node` `v7.7`, `v6.10`, `v4.8`; disable osx builds since they block linux builds
+
+## **6.1.1**
+- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties
+
+## [**6.1.0**](https://github.com/ljharb/qs/issues?milestone=35&state=closed)
+- [New] allowDots option for `stringify` (#151)
+- [Fix] "sort" option should work at a depth of 3 or more (#151)
+- [Fix] Restore `dist` directory; will be removed in v7 (#148)
+
+## **6.0.4**
+- [Fix] follow `allowPrototypes` option during merge (#201, #200)
+- [Fix] chmod a-x
+- [Fix] support keys starting with brackets (#202, #200)
+- [Tests] up to `node` `v7.7`, `v6.10`, `v4.8`; disable osx builds since they block linux builds
+
+## **6.0.3**
+- [Fix] ensure that `allowPrototypes: false` does not ever shadow Object.prototype properties
+- [Fix] Restore `dist` directory; will be removed in v7 (#148)
+
+## [**6.0.2**](https://github.com/ljharb/qs/issues?milestone=33&state=closed)
+- Revert ES6 requirement and restore support for node down to v0.8.
+
+## [**6.0.1**](https://github.com/ljharb/qs/issues?milestone=32&state=closed)
+- [**#127**](https://github.com/ljharb/qs/pull/127) Fix engines definition in package.json
+
+## [**6.0.0**](https://github.com/ljharb/qs/issues?milestone=31&state=closed)
+- [**#124**](https://github.com/ljharb/qs/issues/124) Use ES6 and drop support for node < v4
+
+## **5.2.1**
+- [Fix] ensure `key[]=x&key[]&key[]=y` results in 3, not 2, values
+
+## [**5.2.0**](https://github.com/ljharb/qs/issues?milestone=30&state=closed)
+- [**#64**](https://github.com/ljharb/qs/issues/64) Add option to sort object keys in the query string
+
+## [**5.1.0**](https://github.com/ljharb/qs/issues?milestone=29&state=closed)
+- [**#117**](https://github.com/ljharb/qs/issues/117) make URI encoding stringified results optional
+- [**#106**](https://github.com/ljharb/qs/issues/106) Add flag `skipNulls` to optionally skip null values in stringify
+
+## [**5.0.0**](https://github.com/ljharb/qs/issues?milestone=28&state=closed)
+- [**#114**](https://github.com/ljharb/qs/issues/114) default allowDots to false
+- [**#100**](https://github.com/ljharb/qs/issues/100) include dist to npm
+
+## [**4.0.0**](https://github.com/ljharb/qs/issues?milestone=26&state=closed)
+- [**#98**](https://github.com/ljharb/qs/issues/98) make returning plain objects and allowing prototype overwriting properties optional
+
+## [**3.1.0**](https://github.com/ljharb/qs/issues?milestone=24&state=closed)
+- [**#89**](https://github.com/ljharb/qs/issues/89) Add option to disable "Transform dot notation to bracket notation"
+
+## [**3.0.0**](https://github.com/ljharb/qs/issues?milestone=23&state=closed)
+- [**#80**](https://github.com/ljharb/qs/issues/80) qs.parse silently drops properties
+- [**#77**](https://github.com/ljharb/qs/issues/77) Perf boost
+- [**#60**](https://github.com/ljharb/qs/issues/60) Add explicit option to disable array parsing
+- [**#74**](https://github.com/ljharb/qs/issues/74) Bad parse when turning array into object
+- [**#81**](https://github.com/ljharb/qs/issues/81) Add a `filter` option
+- [**#68**](https://github.com/ljharb/qs/issues/68) Fixed issue with recursion and passing strings into objects.
+- [**#66**](https://github.com/ljharb/qs/issues/66) Add mixed array and object dot notation support Closes: #47
+- [**#76**](https://github.com/ljharb/qs/issues/76) RFC 3986
+- [**#85**](https://github.com/ljharb/qs/issues/85) No equal sign
+- [**#84**](https://github.com/ljharb/qs/issues/84) update license attribute
+
+## [**2.4.1**](https://github.com/ljharb/qs/issues?milestone=20&state=closed)
+- [**#73**](https://github.com/ljharb/qs/issues/73) Property 'hasOwnProperty' of object #