From 88d56fff1012edb5af076381ec678c8cd36fa912 Mon Sep 17 00:00:00 2001
From: Peter Amstutz <pamstutz@veritasgenetics.com>
Date: Thu, 26 Oct 2017 16:27:58 -0400
Subject: [PATCH 01/18] Add initial git-send-pack protocol support

---
 lib/pack-codec.js    |   5 +-
 net/git-send-pack.js | 183 +++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 187 insertions(+), 1 deletion(-)
 create mode 100644 net/git-send-pack.js

diff --git a/lib/pack-codec.js b/lib/pack-codec.js
index 93ac7c1..d7ed592 100644
--- a/lib/pack-codec.js
+++ b/lib/pack-codec.js
@@ -89,7 +89,10 @@ function decodePack(emit) {
       state = state(chunk[i], i, chunk);
       position++;
     }
-    if (!state) return;
+      if (!state) {
+          emit();
+          return;
+      }
     if (state !== $checksum) sha1sum.update(chunk);
     var buff = inf.flush();
     if (buff.length) {
diff --git a/net/git-send-pack.js b/net/git-send-pack.js
new file mode 100644
index 0000000..e1f5099
--- /dev/null
+++ b/net/git-send-pack.js
@@ -0,0 +1,183 @@
+"use strict";
+
+var makeChannel = require('culvert');
+var wrapHandler = require('../lib/wrap-handler');
+var bodec = require('bodec');
+
+module.exports = sendPack;
+
+function sendPack(transport, onError) {
+
+  if (!onError) onError = throwIt;
+
+  // Wrap our handler functions to route errors properly.
+  onRef = wrapHandler(onRef, onError);
+  onWant = wrapHandler(onWant, onError);
+  onNak = wrapHandler(onNak, onError);
+  onMore = wrapHandler(onMore, onError);
+  onReady = wrapHandler(onReady, onError);
+
+  var caps = null;
+  var capsSent = false;
+  var refs = {};
+  var haves = {};
+  var havesCount = 0;
+
+  // Create a duplex channel for talking with the agent.
+  var libraryChannel = makeChannel();
+  var agentChannel = makeChannel();
+  var api = {
+    put: libraryChannel.put,
+    drain: libraryChannel.drain,
+    take: agentChannel.take
+  };
+
+  // Start the connection and listen for the response.
+  var socket = transport("git-receive-pack", onError);
+  socket.take(onRef);
+
+  // Return the other half of the duplex API channel.
+  return {
+    put: agentChannel.put,
+    drain: agentChannel.drain,
+    take: libraryChannel.take
+  };
+
+  function onRef(line) {
+    if (line === undefined) {
+      throw new Error("Socket disconnected");
+    }
+    if (line === null) {
+      api.put(refs);
+      api.take(onWant);
+      return;
+    }
+    else if (!caps) {
+      caps = {};
+      Object.defineProperty(refs, "caps", {value: caps});
+      Object.defineProperty(refs, "shallows", {value:[]});
+      var index = line.indexOf("\0");
+      if (index >= 0) {
+        line.substring(index + 1).split(" ").forEach(function (cap) {
+          var i = cap.indexOf("=");
+          if (i >= 0) {
+            caps[cap.substring(0, i)] = cap.substring(i + 1);
+          }
+          else {
+            caps[cap] = true;
+          }
+        });
+        line = line.substring(0, index);
+      }
+    }
+    var match = line.match(/(^[0-9a-f]{40}) (.*)$/);
+    if (!match) {
+      if (typeof line === "string" && /^ERR/i.test(line)) {
+        throw new Error(line);
+      }
+      throw new Error("Invalid line: " + JSON.stringify(line));
+    }
+    refs[match[2]] = match[1];
+    socket.take(onRef);
+  }
+
+  var packChannel;
+  var progressChannel;
+  var errorChannel;
+
+  function onPush(line) {
+    if (line === undefined) return socket.put();
+    if (line === null) {
+      socket.put(null);
+      return api.take(onPush);
+    }
+    if (line.oldhash) {
+      var extra = "";
+      if (!capsSent) {
+        capsSent = true;
+        if (caps["ofs-delta"]) extra += " ofs-delta";
+        if (caps["thin-pack"]) extra += " thin-pack";
+        // if (caps["multi_ack_detailed"]) extra += " multi_ack_detailed";
+        // else if (caps["multi_ack"]) extra +=" multi_ack";
+        if (caps["side-band-64k"]) extra += " side-band-64k";
+        else if (caps["side-band"]) extra += " side-band";
+        // if (caps["agent"]) extra += " agent=" + agent;
+        if (caps.agent) extra += " agent=" + caps.agent;
+      }
+      extra += "\n";
+      socket.put(line.oldhash + " " + line.newhash + " " + line.ref + extra);
+      return api.take(onWant);
+    }
+
+    throw new Error("Invalid push command");
+  }
+
+  function onNak(line) {
+    if (line === undefined) return api.put();
+    if (line === null) return socket.take(onNak);
+    if (bodec.isBinary(line) || line.progress || line.error) {
+      packChannel = makeChannel();
+      progressChannel = makeChannel();
+      errorChannel = makeChannel();
+      api.put({
+        pack: { take: packChannel.take },
+        progress: { take: progressChannel.take },
+        error: { take: errorChannel.take },
+      });
+      return onMore(null, line);
+    }
+    var match = line.match(/^shallow ([0-9a-f]{40})$/);
+    if (match) {
+      refs.shallows.push(match[1]);
+      return socket.take(onNak);
+    }
+    match = line.match(/^ACK ([0-9a-f]{40})$/);
+    if (match) {
+      return socket.take(onNak);
+    }
+    if (line === "NAK") {
+      return socket.take(onNak);
+    }
+    throw new Error("Expected NAK, but got " + JSON.stringify(line));
+  }
+
+  function onMore(line) {
+
+    if (line === undefined) {
+      packChannel.put();
+      progressChannel.put();
+      errorChannel.put();
+      return api.put();
+    }
+    if (line === null) {
+      api.put(line);
+    }
+    else {
+      if (line.progress) {
+        progressChannel.put(line.progress);
+      }
+      else if (line.error) {
+        errorChannel.put(line.error);
+      }
+      else {
+        if (!packChannel.put(line)) {
+          return packChannel.drain(onReady);
+        }
+      }
+    }
+    socket.take(onMore);
+  }
+
+  function onReady() {
+    socket.take(onMore);
+  }
+
+}
+
+var defer = require('js-git/lib/defer');
+function throwIt(err) {
+  defer(function () {
+    throw err;
+  });
+  // throw err;
+}

From 7f1f862a6da7d9d4443b6201504c498795a2e025 Mon Sep 17 00:00:00 2001
From: Peter Amstutz <pamstutz@veritasgenetics.com>
Date: Thu, 26 Oct 2017 21:01:05 -0400
Subject: [PATCH 02/18] Rework git-send-pack push flow and unframed HTTP writes

---
 net/git-fetch-pack.js |  22 +++++----
 net/git-send-pack.js  | 107 ++++++++++++------------------------------
 net/transport-http.js |  16 +++++--
 3 files changed, 53 insertions(+), 92 deletions(-)

diff --git a/net/git-fetch-pack.js b/net/git-fetch-pack.js
index 4e75303..b347200 100644
--- a/net/git-fetch-pack.js
+++ b/net/git-fetch-pack.js
@@ -103,16 +103,18 @@ function fetchPack(transport, onError) {
     }
     if (line.want) {
       var extra = "";
-      if (!capsSent) {
-        capsSent = true;
-        if (caps["ofs-delta"]) extra += " ofs-delta";
-        if (caps["thin-pack"]) extra += " thin-pack";
-        // if (caps["multi_ack_detailed"]) extra += " multi_ack_detailed";
-        // else if (caps["multi_ack"]) extra +=" multi_ack";
-        if (caps["side-band-64k"]) extra += " side-band-64k";
-        else if (caps["side-band"]) extra += " side-band";
-        // if (caps["agent"]) extra += " agent=" + agent;
-        if (caps.agent) extra += " agent=" + caps.agent;
+        if (!capsSent) {
+            capsSent = true;
+            var caplist = [];
+            if (caps["ofs-delta"]) caplist.push("ofs-delta");
+            if (caps["thin-pack"]) caplist.push("thin-pack");
+            // if (caps["multi_ack_detailed"]) extra += " multi_ack_detailed";
+            // else if (caps["multi_ack"]) extra +=" multi_ack";
+            if (caps["side-band-64k"]) caplist.push("side-band-64k");
+            else if (caps["side-band"]) caplist.push("side-band");
+            // if (caps["agent"]) extra += " agent=" + agent;
+            if (caps.agent) extra += caplist.push("agent=" + caps.agent);
+            extra = " " + caplist.join(" ");
       }
       extra += "\n";
       socket.put("want " + line.want + extra);
diff --git a/net/git-send-pack.js b/net/git-send-pack.js
index e1f5099..de451da 100644
--- a/net/git-send-pack.js
+++ b/net/git-send-pack.js
@@ -12,10 +12,7 @@ function sendPack(transport, onError) {
 
   // Wrap our handler functions to route errors properly.
   onRef = wrapHandler(onRef, onError);
-  onWant = wrapHandler(onWant, onError);
-  onNak = wrapHandler(onNak, onError);
-  onMore = wrapHandler(onMore, onError);
-  onReady = wrapHandler(onReady, onError);
+  onPush = wrapHandler(onPush, onError);
 
   var caps = null;
   var capsSent = false;
@@ -49,13 +46,12 @@ function sendPack(transport, onError) {
     }
     if (line === null) {
       api.put(refs);
-      api.take(onWant);
+      api.take(onPush);
       return;
     }
     else if (!caps) {
       caps = {};
       Object.defineProperty(refs, "caps", {value: caps});
-      Object.defineProperty(refs, "shallows", {value:[]});
       var index = line.indexOf("\0");
       if (index >= 0) {
         line.substring(index + 1).split(" ").forEach(function (cap) {
@@ -89,88 +85,43 @@ function sendPack(transport, onError) {
     if (line === undefined) return socket.put();
     if (line === null) {
       socket.put(null);
-      return api.take(onPush);
+      return api.take(onPack);
     }
     if (line.oldhash) {
-      var extra = "";
-      if (!capsSent) {
-        capsSent = true;
-        if (caps["ofs-delta"]) extra += " ofs-delta";
-        if (caps["thin-pack"]) extra += " thin-pack";
-        // if (caps["multi_ack_detailed"]) extra += " multi_ack_detailed";
-        // else if (caps["multi_ack"]) extra +=" multi_ack";
-        if (caps["side-band-64k"]) extra += " side-band-64k";
-        else if (caps["side-band"]) extra += " side-band";
-        // if (caps["agent"]) extra += " agent=" + agent;
-        if (caps.agent) extra += " agent=" + caps.agent;
+        var extra = "";
+        if (!capsSent) {
+            capsSent = true;
+            var caplist = [];
+            if (caps["ofs-delta"]) caplist.push("ofs-delta");
+            if (caps["thin-pack"]) caplist.push("thin-pack");
+            // if (caps["multi_ack_detailed"]) extra += " multi_ack_detailed";
+            // else if (caps["multi_ack"]) extra +=" multi_ack";
+            if (caps["side-band-64k"]) caplist.push("side-band-64k");
+            else if (caps["side-band"]) caplist.push("side-band");
+            // if (caps["agent"]) extra += " agent=" + agent;
+            if (caps.agent) extra += caplist.push("agent=" + caps.agent);
+            extra = "\0" + caplist.join(" ");
       }
-      extra += "\n";
+        extra += "\n";
       socket.put(line.oldhash + " " + line.newhash + " " + line.ref + extra);
-      return api.take(onWant);
-    }
-
-    throw new Error("Invalid push command");
-  }
-
-  function onNak(line) {
-    if (line === undefined) return api.put();
-    if (line === null) return socket.take(onNak);
-    if (bodec.isBinary(line) || line.progress || line.error) {
-      packChannel = makeChannel();
-      progressChannel = makeChannel();
-      errorChannel = makeChannel();
-      api.put({
-        pack: { take: packChannel.take },
-        progress: { take: progressChannel.take },
-        error: { take: errorChannel.take },
-      });
-      return onMore(null, line);
-    }
-    var match = line.match(/^shallow ([0-9a-f]{40})$/);
-    if (match) {
-      refs.shallows.push(match[1]);
-      return socket.take(onNak);
-    }
-    match = line.match(/^ACK ([0-9a-f]{40})$/);
-    if (match) {
-      return socket.take(onNak);
-    }
-    if (line === "NAK") {
-      return socket.take(onNak);
+      return api.take(onPush);
     }
-    throw new Error("Expected NAK, but got " + JSON.stringify(line));
+      throw new Error("Invalid push command");
   }
 
-  function onMore(line) {
-
-    if (line === undefined) {
-      packChannel.put();
-      progressChannel.put();
-      errorChannel.put();
-      return api.put();
-    }
-    if (line === null) {
-      api.put(line);
-    }
-    else {
-      if (line.progress) {
-        progressChannel.put(line.progress);
-      }
-      else if (line.error) {
-        errorChannel.put(line.error);
-      }
-      else {
-        if (!packChannel.put(line)) {
-          return packChannel.drain(onReady);
+    function onPack(_, line) {
+        if (line.flush) {
+            socket.put(line);
+            socket.take(api.put);
+        } else {
+            socket.put({noframe: line});
         }
-      }
+      return api.take(onPack);
     }
-    socket.take(onMore);
-  }
 
-  function onReady() {
-    socket.take(onMore);
-  }
+    function onResponse(h) {
+        callback(h);
+    }
 
 }
 
diff --git a/net/transport-http.js b/net/transport-http.js
index fd3b0c3..d8c89c2 100644
--- a/net/transport-http.js
+++ b/net/transport-http.js
@@ -80,10 +80,18 @@ module.exports = function (request) {
       }
 
       function onWrite(item) {
-        if (item === undefined) return socket.put();
-        bodyWrite(item);
-        socket.take(onWrite);
-        if (item !== "done\n" || !bodyParts.length) return;
+          if (item === undefined) return socket.put();
+          if (item === null || !item.flush) {
+              if (item !== null && item.noframe !== undefined) {
+                  bodyParts.push(item.noframe);
+              } else {
+                  bodyWrite(item);
+              }
+          }
+          socket.take(onWrite);
+          if (item === null || (!item.flush)) {
+              if ((item !== "done\n" || !bodyParts.length) ) return;
+          }
         var body = bodec.join(bodyParts);
         bodyParts.length = 0;
         request("POST", gitUrl + "/" + serviceName, headers, body, onResult);

From 25ee621e9616e134fbcec95872894ac93dc3dc81 Mon Sep 17 00:00:00 2001
From: Peter Amstutz <pamstutz@veritasgenetics.com>
Date: Thu, 26 Oct 2017 21:30:20 -0400
Subject: [PATCH 03/18] Fix sending response

---
 net/git-send-pack.js | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/net/git-send-pack.js b/net/git-send-pack.js
index de451da..04addd6 100644
--- a/net/git-send-pack.js
+++ b/net/git-send-pack.js
@@ -112,7 +112,9 @@ function sendPack(transport, onError) {
     function onPack(_, line) {
         if (line.flush) {
             socket.put(line);
-            socket.take(api.put);
+            socket.take(function(_, h) {
+                api.put(h);
+            });
         } else {
             socket.put({noframe: line});
         }

From c2c12a962cc23c307b35958b6d5505aa2ec37b89 Mon Sep 17 00:00:00 2001
From: Jasmin Besic <jbesic@capeannenterprises.com>
Date: Tue, 28 Nov 2017 11:13:52 +0100
Subject: [PATCH 04/18] Add pull, push, and commit features to js-git

---
 lib/pack-codec.js    |  3 +-
 mixins/high-level.js | 52 ++++++++++++++++++++++++++++++++
 net/git-send-pack.js | 71 +++++++++++++++++++++++---------------------
 3 files changed, 90 insertions(+), 36 deletions(-)
 create mode 100644 mixins/high-level.js

diff --git a/lib/pack-codec.js b/lib/pack-codec.js
index d7ed592..5b7a09f 100644
--- a/lib/pack-codec.js
+++ b/lib/pack-codec.js
@@ -90,8 +90,7 @@ function decodePack(emit) {
       position++;
     }
       if (!state) {
-          emit();
-          return;
+          return emit();
       }
     if (state !== $checksum) sha1sum.update(chunk);
     var buff = inf.flush();
diff --git a/mixins/high-level.js b/mixins/high-level.js
new file mode 100644
index 0000000..a62ac8d
--- /dev/null
+++ b/mixins/high-level.js
@@ -0,0 +1,52 @@
+"use strict";
+
+var request = require('../net/request-xhr');
+var fetchPackProtocol = require('../net/git-fetch-pack');
+var sendPackProtocol = require('../net/git-send-pack');
+
+module.exports = highLevel;
+
+function highLevel(repo, uName, uPass, hostName) {
+
+  require('./mem-db')(repo);
+  require('./create-tree')(repo);
+  require('./read-combiner')(repo);
+  require('./pack-ops')(repo);
+  require('./walkers')(repo);
+  require('./formats')(repo);
+
+  var httpTransport = require('../net/transport-http')(request);
+  var transport = httpTransport(hostName, uName, uPass);
+  var fetch = fetchPackProtocol(transport);
+  var push = sendPackProtocol(transport);
+
+  repo.clone = clone;
+  repo.commit = commit;
+  repo.push = push;
+
+  function clone(callback) {
+    fetch.take(function (err, refs) {
+      fetch.put({
+        want: refs['refs/heads/master']
+      });
+
+      fetch.put(null);
+      fetch.put({
+        done: true
+      });
+
+      fetch.take(function (err, channels) {
+        repo.unpack(channels.pack, {}, function () {
+          repo.updateRef('refs/heads/master', refs['refs/heads/master'], function () {
+            callback('Clonned !');
+          });
+        });
+      });
+    });
+  }
+
+  function commit() {}
+
+  function push() {}
+
+}
\ No newline at end of file
diff --git a/net/git-send-pack.js b/net/git-send-pack.js
index 04addd6..75a685b 100644
--- a/net/git-send-pack.js
+++ b/net/git-send-pack.js
@@ -48,18 +48,18 @@ function sendPack(transport, onError) {
       api.put(refs);
       api.take(onPush);
       return;
-    }
-    else if (!caps) {
+    } else if (!caps) {
       caps = {};
-      Object.defineProperty(refs, "caps", {value: caps});
+      Object.defineProperty(refs, "caps", {
+        value: caps
+      });
       var index = line.indexOf("\0");
       if (index >= 0) {
         line.substring(index + 1).split(" ").forEach(function (cap) {
           var i = cap.indexOf("=");
           if (i >= 0) {
             caps[cap.substring(0, i)] = cap.substring(i + 1);
-          }
-          else {
+          } else {
             caps[cap] = true;
           }
         });
@@ -88,49 +88,52 @@ function sendPack(transport, onError) {
       return api.take(onPack);
     }
     if (line.oldhash) {
-        var extra = "";
-        if (!capsSent) {
-            capsSent = true;
-            var caplist = [];
-            if (caps["ofs-delta"]) caplist.push("ofs-delta");
-            if (caps["thin-pack"]) caplist.push("thin-pack");
-            // if (caps["multi_ack_detailed"]) extra += " multi_ack_detailed";
-            // else if (caps["multi_ack"]) extra +=" multi_ack";
-            if (caps["side-band-64k"]) caplist.push("side-band-64k");
-            else if (caps["side-band"]) caplist.push("side-band");
-            // if (caps["agent"]) extra += " agent=" + agent;
-            if (caps.agent) extra += caplist.push("agent=" + caps.agent);
-            extra = "\0" + caplist.join(" ");
+      var extra = "";
+      if (!capsSent) {
+        capsSent = true;
+        var caplist = [];
+        if (caps["ofs-delta"]) caplist.push("ofs-delta");
+        if (caps["thin-pack"]) caplist.push("thin-pack");
+        // if (caps["multi_ack_detailed"]) extra += " multi_ack_detailed";
+        // else if (caps["multi_ack"]) extra +=" multi_ack";
+        if (caps["side-band-64k"]) caplist.push("side-band-64k");
+        else if (caps["side-band"]) caplist.push("side-band");
+        // if (caps["agent"]) extra += " agent=" + agent;
+        if (caps.agent) extra += caplist.push("agent=" + caps.agent);
+        extra = " " + caplist.join(" ");
       }
-        extra += "\n";
+      extra += "\n";
       socket.put(line.oldhash + " " + line.newhash + " " + line.ref + extra);
       return api.take(onPush);
     }
-      throw new Error("Invalid push command");
+    throw new Error("Invalid push command");
   }
 
-    function onPack(_, line) {
-        if (line.flush) {
-            socket.put(line);
-            socket.take(function(_, h) {
-                api.put(h);
-            });
-        } else {
-            socket.put({noframe: line});
-        }
-      return api.take(onPack);
+  function onPack(_, line) {
+    if (line.flush) {
+      socket.put(line);
+      socket.take(function (_, h) {
+        api.put(h);
+      });
+    } else {
+      socket.put({
+        noframe: line
+      });
     }
+    return api.take(onPack);
+  }
 
-    function onResponse(h) {
-        callback(h);
-    }
+  function onResponse(h) {
+    callback(h);
+  }
 
 }
 
 var defer = require('js-git/lib/defer');
+
 function throwIt(err) {
   defer(function () {
     throw err;
   });
   // throw err;
-}
+}
\ No newline at end of file
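
A rough usage sketch of the high-level mixin added above, based only on the signatures in this patch; the require path, host URL and credentials are placeholders, and only clone() is functional at this point (commit() and push() are still stubs):

var repo = {};
// highLevel(repo, uName, uPass, hostName) wires the in-memory db, tree, pack
// and walker mixins onto `repo` and builds an HTTP transport for the host.
require('./mixins/high-level')(repo, 'user', 'secret',
                               'https://git.example.com/project.git');

repo.clone(function (message) {
  // refs/heads/master has been fetched, unpacked into the in-memory db,
  // and the local ref updated.
  console.log(message);
});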

From ba23765cd7733463aec10c1d5621602ecdff0027 Mon Sep 17 00:00:00 2001
From: Jasmin Besic <jbesic@capeannenterprises.com>
Date: Tue, 28 Nov 2017 11:17:35 +0100
Subject: [PATCH 05/18] Revert caps handling to its original state

---
 net/git-fetch-pack.js | 22 ++++++++++------------
 net/git-send-pack.js  | 12 +++++-------
 2 files changed, 15 insertions(+), 19 deletions(-)

diff --git a/net/git-fetch-pack.js b/net/git-fetch-pack.js
index b347200..4e75303 100644
--- a/net/git-fetch-pack.js
+++ b/net/git-fetch-pack.js
@@ -103,18 +103,16 @@ function fetchPack(transport, onError) {
     }
     if (line.want) {
       var extra = "";
-        if (!capsSent) {
-            capsSent = true;
-            var caplist = [];
-            if (caps["ofs-delta"]) caplist.push("ofs-delta");
-            if (caps["thin-pack"]) caplist.push("thin-pack");
-            // if (caps["multi_ack_detailed"]) extra += " multi_ack_detailed";
-            // else if (caps["multi_ack"]) extra +=" multi_ack";
-            if (caps["side-band-64k"]) caplist.push("side-band-64k");
-            else if (caps["side-band"]) caplist.push("side-band");
-            // if (caps["agent"]) extra += " agent=" + agent;
-            if (caps.agent) extra += caplist.push("agent=" + caps.agent);
-            extra = " " + caplist.join(" ");
+      if (!capsSent) {
+        capsSent = true;
+        if (caps["ofs-delta"]) extra += " ofs-delta";
+        if (caps["thin-pack"]) extra += " thin-pack";
+        // if (caps["multi_ack_detailed"]) extra += " multi_ack_detailed";
+        // else if (caps["multi_ack"]) extra +=" multi_ack";
+        if (caps["side-band-64k"]) extra += " side-band-64k";
+        else if (caps["side-band"]) extra += " side-band";
+        // if (caps["agent"]) extra += " agent=" + agent;
+        if (caps.agent) extra += " agent=" + caps.agent;
       }
       extra += "\n";
       socket.put("want " + line.want + extra);
diff --git a/net/git-send-pack.js b/net/git-send-pack.js
index 75a685b..fe2113f 100644
--- a/net/git-send-pack.js
+++ b/net/git-send-pack.js
@@ -91,16 +91,14 @@ function sendPack(transport, onError) {
       var extra = "";
       if (!capsSent) {
         capsSent = true;
-        var caplist = [];
-        if (caps["ofs-delta"]) caplist.push("ofs-delta");
-        if (caps["thin-pack"]) caplist.push("thin-pack");
+        if (caps["ofs-delta"]) extra += " ofs-delta";
+        if (caps["thin-pack"]) extra += " thin-pack";
         // if (caps["multi_ack_detailed"]) extra += " multi_ack_detailed";
         // else if (caps["multi_ack"]) extra +=" multi_ack";
-        if (caps["side-band-64k"]) caplist.push("side-band-64k");
-        else if (caps["side-band"]) caplist.push("side-band");
+        if (caps["side-band-64k"]) extra += " side-band-64k";
+        else if (caps["side-band"]) extra += " side-band";
         // if (caps["agent"]) extra += " agent=" + agent;
-        if (caps.agent) extra += caplist.push("agent=" + caps.agent);
-        extra = " " + caplist.join(" ");
+        if (caps.agent) extra += " agent=" + caps.agent;
       }
       extra += "\n";
       socket.put(line.oldhash + " " + line.newhash + " " + line.ref + extra);

From 5a23cd378eda2d587d765c34e1bc7e9a28a541ab Mon Sep 17 00:00:00 2001
From: Jasmin Besic <jbesic@capeannenterprises.com>
Date: Tue, 28 Nov 2017 14:17:45 +0100
Subject: [PATCH 06/18] Collect files function

---
 mixins/high-level.js | 128 ++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 125 insertions(+), 3 deletions(-)

diff --git a/mixins/high-level.js b/mixins/high-level.js
index a62ac8d..b61d421 100644
--- a/mixins/high-level.js
+++ b/mixins/high-level.js
@@ -23,6 +23,7 @@ function highLevel(repo, uName, uPass, hostName) {
   repo.clone = clone;
   repo.commit = commit;
   repo.push = push;
+  repo.resolveRepo = resolveRepo;
 
   function clone(callback) {
     fetch.take(function (err, refs) {
@@ -38,15 +39,136 @@ function highLevel(repo, uName, uPass, hostName) {
       fetch.take(function (err, channels) {
         repo.unpack(channels.pack, {}, function () {
           repo.updateRef('refs/heads/master', refs['refs/heads/master'], function () {
-            callback('Clonned !');
+            return callback('Clonned !');
           });
         });
       });
     });
   }
 
-  function commit() {}
+  function commit(callback) {
+    repo.readRef('refs/heads/master', function(err, refHash) {
+      repo.loadAs('commit', refHash, function(err, commit) {
+        // Changes to files that already exists
+        var changes = [
+          {
+              path: "/test/justAdded22.txt",
+              mode: modes.file,
+              content: ""
+          },
+          {
+              path: "/test/second.txt",
+              mode: modes.file,
+              content: "This is the updated content 111safa."
+          }
+        ];
+        changes['base'] = commit.tree;
 
-  function push() {}
+        repo.createTree(changes, function(err, treeHash) {
+          var commitMessage = {
+            author: {
+                name: commit.author.name,
+                email: commit.author.email
+            },
+            tree: treeHash,
+            parent: refHash,
+            message: "This is the commit message.\n"
+          }
+
+          repo.saveAs('commit', commitMessage, function(err, commitHash) {
+            repo.updateRef('refs/heads/master', commitHash, function(err, res) {
+              return callback('Commit done !');
+            });
+          });
+        });
+      });
+    });
+  }
+
+  function push(callback) {
+    repo.readRef('refs/heads/master', function(err, refHash) {
+      repo.loadAs('commit', refHash, function(err, commit) {
+        push.take(function() {
+          push.put({ oldhash: commit.parents[0], newhash: refHash, ref: 'refs/heads/master' });
+          push.put(null);
+
+          var hashes = [refHash];
+          repo.treeWalk(commit.tree, function(err, item) {
+            function collectHashes(err, object) {
+              if (object !== undefined) {
+                hashes.push(object);
+                item.read(collectHashes);
+              } else {
+                repo.pack(hashes, {}, function(err, stream) {
+                  function putHashes(err, packObject) {
+                    if (packObject !== undefined) {
+                      push.put(packObject);
+                      stream.take(putHashes);
+                    } else {
+                      push.put({flush: true});
+                      return callback('Push done !');
+                    }
+                  }
+
+                  stream.take(putHashes);
+                });
+              }
+            }
+
+            item.read(collectHashes);
+          });
+        });
+      });
+    });
+  }
+
+  function resolveRepo(callback) {
+    repo.readRef('refs/heads/master', function(err, refHash) {
+      repo.loadAs('commit', refHash, function(err, commit) {
+        if (commit === undefined) { return callback(); }
+
+        var files = [];
+        repo.treeWalk(commit.tree, function(err, item) {
+          /*
+            {
+              '/': {
+                mode: xxx,
+                hash: xzz,
+                'folder 1': {
+                  mode: xxx,
+                  hash: xzz,
+                  text.txt: {
+                    mode: xxx,
+                    hash: xzz,
+                    content: 'asasgfasgagga'
+                  }
+                }
+              }
+            }
+          */
+          function collectFiles(err, object) {
+            if (object !== undefined) {
+              var loadType = object.mode === 16384 ? 'tree' : 'text';
+              console.log(object);
+              var pathArray = object.path.split('/').filter(function(element) {
+                return element.length !== 0;
+              });
+
+              console.log(pathArray);
+              repo.loadAs(loadType, object.hash, function(err, content) {
+                //console.log(content);
+                //files.push(content);
+                item.read(collectFiles);
+              });
+            } else {
+              return callback(files);
+            }
+          }
+
+          item.read(collectFiles);
+        });
+      });
+    });
+  }
 
 }
\ No newline at end of file

From 33c6df173038fba1ca9706faca1f75597e8bc573 Mon Sep 17 00:00:00 2001
From: Jasmin Besic <jbesic@capeannenterprises.com>
Date: Tue, 28 Nov 2017 17:07:03 +0100
Subject: [PATCH 07/18] Mimic the repo structure in memory

---
 mixins/high-level.js | 47 ++++++++++++++++++++++----------------------
 1 file changed, 23 insertions(+), 24 deletions(-)

diff --git a/mixins/high-level.js b/mixins/high-level.js
index b61d421..5b15e96 100644
--- a/mixins/high-level.js
+++ b/mixins/high-level.js
@@ -127,41 +127,40 @@ function highLevel(repo, uName, uPass, hostName) {
       repo.loadAs('commit', refHash, function(err, commit) {
         if (commit === undefined) { return callback(); }
 
-        var files = [];
+        var repoStructure = {};
         repo.treeWalk(commit.tree, function(err, item) {
-          /*
-            {
-              '/': {
-                mode: xxx,
-                hash: xzz,
-                'folder 1': {
-                  mode: xxx,
-                  hash: xzz,
-                  text.txt: {
-                    mode: xxx,
-                    hash: xzz,
-                    content: 'asasgfasgagga'
-                  }
-                }
-              }
-            }
-          */
           function collectFiles(err, object) {
             if (object !== undefined) {
+              var temp = repoStructure;
               var loadType = object.mode === 16384 ? 'tree' : 'text';
-              console.log(object);
               var pathArray = object.path.split('/').filter(function(element) {
-                return element.length !== 0;
+                return element.length > 0;
               });
 
-              console.log(pathArray);
+              pathArray = ['/'].concat(pathArray);
+
               repo.loadAs(loadType, object.hash, function(err, content) {
-                //console.log(content);
-                //files.push(content);
+                pathArray.forEach(function(element) {
+                  if (temp.hasOwnProperty(element)) {
+                    temp = temp[element]
+                    return true;
+                  }
+
+                  temp[element] = {
+                    hash: object.hash,
+                    mode: object.mode,
+                    path: object.path
+                  };
+
+                  if (loadType === 'text') {
+                    temp[element].content = content;
+                  }
+                });
+
                 item.read(collectFiles);
               });
             } else {
-              return callback(files);
+              return callback(repoStructure);
             }
           }
 

From 7bb2234caa721d036c1fd8036521d43e812710ed Mon Sep 17 00:00:00 2001
From: Jasmin Besic <jbesic@capeannenterprises.com>
Date: Mon, 4 Dec 2017 11:02:21 +0100
Subject: [PATCH 08/18] Resolve conflict with function names.

---
 mixins/high-level.js | 91 +++++++++++++++-----------------------------
 1 file changed, 31 insertions(+), 60 deletions(-)

diff --git a/mixins/high-level.js b/mixins/high-level.js
index 5b15e96..c95834f 100644
--- a/mixins/high-level.js
+++ b/mixins/high-level.js
@@ -17,26 +17,27 @@ function highLevel(repo, uName, uPass, hostName) {
 
   var httpTransport = require('../net/transport-http')(request);
   var transport = httpTransport(hostName, uName, uPass);
-  var fetch = fetchPackProtocol(transport);
-  var push = sendPackProtocol(transport);
+  var fetchStream = fetchPackProtocol(transport);
+  var pushStream = sendPackProtocol(transport);
 
   repo.clone = clone;
   repo.commit = commit;
   repo.push = push;
   repo.resolveRepo = resolveRepo;
+  repo.getContentByHash = getContentByHash;
 
   function clone(callback) {
-    fetch.take(function (err, refs) {
-      fetch.put({
+    fetchStream.take(function (err, refs) {
+      fetchStream.put({
         want: refs['refs/heads/master']
       });
 
-      fetch.put(null);
-      fetch.put({
+      fetchStream.put(null);
+      fetchStream.put({
         done: true
       });
 
-      fetch.take(function (err, channels) {
+      fetchStream.take(function (err, channels) {
         repo.unpack(channels.pack, {}, function () {
           repo.updateRef('refs/heads/master', refs['refs/heads/master'], function () {
             return callback('Clonned !');
@@ -46,25 +47,13 @@ function highLevel(repo, uName, uPass, hostName) {
     });
   }
 
-  function commit(callback) {
+  function commit(data, message, callback) {
     repo.readRef('refs/heads/master', function(err, refHash) {
       repo.loadAs('commit', refHash, function(err, commit) {
         // Changes to files that already exists
-        var changes = [
-          {
-              path: "/test/justAdded22.txt",
-              mode: modes.file,
-              content: ""
-          },
-          {
-              path: "/test/second.txt",
-              mode: modes.file,
-              content: "This is the updated content 111safa."
-          }
-        ];
-        changes['base'] = commit.tree;
+        data['base'] = commit.tree;
 
-        repo.createTree(changes, function(err, treeHash) {
+        repo.createTree(data, function(err, treeHash) {
           var commitMessage = {
             author: {
                 name: commit.author.name,
@@ -72,7 +61,7 @@ function highLevel(repo, uName, uPass, hostName) {
             },
             tree: treeHash,
             parent: refHash,
-            message: "This is the commit message.\n"
+            message: message
           }
 
           repo.saveAs('commit', commitMessage, function(err, commitHash) {
@@ -88,9 +77,9 @@ function highLevel(repo, uName, uPass, hostName) {
   function push(callback) {
     repo.readRef('refs/heads/master', function(err, refHash) {
       repo.loadAs('commit', refHash, function(err, commit) {
-        push.take(function() {
-          push.put({ oldhash: commit.parents[0], newhash: refHash, ref: 'refs/heads/master' });
-          push.put(null);
+        pushStream.take(function() {
+          pushStream.put({ oldhash: commit.parents[0], newhash: refHash, ref: 'refs/heads/master' });
+          pushStream.put(null);
 
           var hashes = [refHash];
           repo.treeWalk(commit.tree, function(err, item) {
@@ -102,11 +91,11 @@ function highLevel(repo, uName, uPass, hostName) {
                 repo.pack(hashes, {}, function(err, stream) {
                   function putHashes(err, packObject) {
                     if (packObject !== undefined) {
-                      push.put(packObject);
+                      pushStream.put(packObject);
                       stream.take(putHashes);
                     } else {
-                      push.put({flush: true});
-                      return callback('Push done !');
+                      pushStream.put({flush: true});
+                      return callback('pushStream done !');
                     }
                   }
 
@@ -122,6 +111,12 @@ function highLevel(repo, uName, uPass, hostName) {
     });
   }
 
+  function getContentByHash(hash, callback){
+    repo.loadAs('text', hash, function(err, content){
+      callback(content);
+    })
+  }
+
   function resolveRepo(callback) {
     repo.readRef('refs/heads/master', function(err, refHash) {
       repo.loadAs('commit', refHash, function(err, commit) {
@@ -129,45 +124,21 @@ function highLevel(repo, uName, uPass, hostName) {
 
         var repoStructure = {};
         repo.treeWalk(commit.tree, function(err, item) {
+          
           function collectFiles(err, object) {
             if (object !== undefined) {
-              var temp = repoStructure;
-              var loadType = object.mode === 16384 ? 'tree' : 'text';
-              var pathArray = object.path.split('/').filter(function(element) {
-                return element.length > 0;
-              });
-
-              pathArray = ['/'].concat(pathArray);
-
-              repo.loadAs(loadType, object.hash, function(err, content) {
-                pathArray.forEach(function(element) {
-                  if (temp.hasOwnProperty(element)) {
-                    temp = temp[element]
-                    return true;
-                  }
-
-                  temp[element] = {
-                    hash: object.hash,
-                    mode: object.mode,
-                    path: object.path
-                  };
-
-                  if (loadType === 'text') {
-                    temp[element].content = content;
-                  }
-                });
-
-                item.read(collectFiles);
-              });
-            } else {
-              return callback(repoStructure);
+              repoStructure[object.path] = object;
+              item.read(collectFiles);
+            }
+            else {
+              return;
             }
           }
 
           item.read(collectFiles);
+          callback(repoStructure);
         });
       });
     });
   }
-
 }
\ No newline at end of file

From 22bf38f7ec76f682f9f1b4d3edcc8c0bb0b9db51 Mon Sep 17 00:00:00 2001
From: Jasmin Besic <jbesic@capeannenterprises.com>
Date: Mon, 4 Dec 2017 11:42:31 +0100
Subject: [PATCH 09/18] Fix for the push function

---
 mixins/high-level.js | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mixins/high-level.js b/mixins/high-level.js
index c95834f..5e30697 100644
--- a/mixins/high-level.js
+++ b/mixins/high-level.js
@@ -85,7 +85,7 @@ function highLevel(repo, uName, uPass, hostName) {
           repo.treeWalk(commit.tree, function(err, item) {
             function collectHashes(err, object) {
               if (object !== undefined) {
-                hashes.push(object);
+                hashes.push(object.hash);
                 item.read(collectHashes);
               } else {
                 repo.pack(hashes, {}, function(err, stream) {

From 58812ac6437b5e205b2fce7e1ca4d0acdd9ab1d1 Mon Sep 17 00:00:00 2001
From: Jasmin Besic <jbesic@capeannenterprises.com>
Date: Mon, 4 Dec 2017 16:20:57 +0100
Subject: [PATCH 10/18] Cleaning the code

---
 mixins/high-level.js | 12 ++++--------
 1 file changed, 4 insertions(+), 8 deletions(-)

diff --git a/mixins/high-level.js b/mixins/high-level.js
index 5e30697..3db4da3 100644
--- a/mixins/high-level.js
+++ b/mixins/high-level.js
@@ -40,7 +40,7 @@ function highLevel(repo, uName, uPass, hostName) {
       fetchStream.take(function (err, channels) {
         repo.unpack(channels.pack, {}, function () {
           repo.updateRef('refs/heads/master', refs['refs/heads/master'], function () {
-            return callback('Clonned !');
+            return callback('Repo is clonned.');
           });
         });
       });
@@ -51,8 +51,7 @@ function highLevel(repo, uName, uPass, hostName) {
     repo.readRef('refs/heads/master', function(err, refHash) {
       repo.loadAs('commit', refHash, function(err, commit) {
         // Changes to files that already exists
-        data['base'] = commit.tree;
-
+        data.base = commit.tree;
         repo.createTree(data, function(err, treeHash) {
           var commitMessage = {
             author: {
@@ -66,7 +65,7 @@ function highLevel(repo, uName, uPass, hostName) {
 
           repo.saveAs('commit', commitMessage, function(err, commitHash) {
             repo.updateRef('refs/heads/master', commitHash, function(err, res) {
-              return callback('Commit done !');
+              return callback('Commit done.');
             });
           });
         });
@@ -95,7 +94,7 @@ function highLevel(repo, uName, uPass, hostName) {
                       stream.take(putHashes);
                     } else {
                       pushStream.put({flush: true});
-                      return callback('pushStream done !');
+                      return callback('Push done.');
                     }
                   }
 
@@ -130,9 +129,6 @@ function highLevel(repo, uName, uPass, hostName) {
               repoStructure[object.path] = object;
               item.read(collectFiles);
             }
-            else {
-              return;
-            }
           }
 
           item.read(collectFiles);

From fa9022e70562d1e01b71a7ef5126434c68afba36 Mon Sep 17 00:00:00 2001
From: Jasmin Besic <jbesic@capeannenterprises.com>
Date: Tue, 5 Dec 2017 14:47:02 +0100
Subject: [PATCH 11/18] Fix commit/push feature

---
 net/git-fetch-pack.js | 12 +++++++-----
 net/git-send-pack.js  | 20 +++++++++++---------
 2 files changed, 18 insertions(+), 14 deletions(-)

diff --git a/net/git-fetch-pack.js b/net/git-fetch-pack.js
index 4e75303..aee8347 100644
--- a/net/git-fetch-pack.js
+++ b/net/git-fetch-pack.js
@@ -105,14 +105,16 @@ function fetchPack(transport, onError) {
       var extra = "";
       if (!capsSent) {
         capsSent = true;
-        if (caps["ofs-delta"]) extra += " ofs-delta";
-        if (caps["thin-pack"]) extra += " thin-pack";
+        var caplist = [];
+        if (caps["ofs-delta"]) caplist.push("ofs-delta");
+        if (caps["thin-pack"]) caplist.push("thin-pack");
         // if (caps["multi_ack_detailed"]) extra += " multi_ack_detailed";
         // else if (caps["multi_ack"]) extra +=" multi_ack";
-        if (caps["side-band-64k"]) extra += " side-band-64k";
-        else if (caps["side-band"]) extra += " side-band";
+        if (caps["side-band-64k"]) caplist.push("side-band-64k");
+        else if (caps["side-band"]) caplist.push("side-band");
         // if (caps["agent"]) extra += " agent=" + agent;
-        if (caps.agent) extra += " agent=" + caps.agent;
+        if (caps.agent) extra += caplist.push("agent=" + caps.agent);
+        extra = " " + caplist.join(" ");
       }
       extra += "\n";
       socket.put("want " + line.want + extra);
diff --git a/net/git-send-pack.js b/net/git-send-pack.js
index fe2113f..7bf533c 100644
--- a/net/git-send-pack.js
+++ b/net/git-send-pack.js
@@ -90,15 +90,17 @@ function sendPack(transport, onError) {
     if (line.oldhash) {
       var extra = "";
       if (!capsSent) {
-        capsSent = true;
-        if (caps["ofs-delta"]) extra += " ofs-delta";
-        if (caps["thin-pack"]) extra += " thin-pack";
-        // if (caps["multi_ack_detailed"]) extra += " multi_ack_detailed";
-        // else if (caps["multi_ack"]) extra +=" multi_ack";
-        if (caps["side-band-64k"]) extra += " side-band-64k";
-        else if (caps["side-band"]) extra += " side-band";
-        // if (caps["agent"]) extra += " agent=" + agent;
-        if (caps.agent) extra += " agent=" + caps.agent;
+          capsSent = true;
+          var caplist = [];
+          if (caps["ofs-delta"]) caplist.push("ofs-delta");
+          if (caps["thin-pack"]) caplist.push("thin-pack");
+          // if (caps["multi_ack_detailed"]) extra += " multi_ack_detailed";
+          // else if (caps["multi_ack"]) extra +=" multi_ack";
+          if (caps["side-band-64k"]) caplist.push("side-band-64k");
+          else if (caps["side-band"]) caplist.push("side-band");
+          // if (caps["agent"]) extra += " agent=" + agent;
+          if (caps.agent) extra += caplist.push("agent=" + caps.agent);
+          extra = "\0" + caplist.join(" ");
       }
       extra += "\n";
       socket.put(line.oldhash + " " + line.newhash + " " + line.ref + extra);

From f7a656327b6bbbeff0965c496ab2c83c816408bb Mon Sep 17 00:00:00 2001
From: Peter Amstutz <pamstutz@veritasgenetics.com>
Date: Fri, 8 Dec 2017 11:40:25 -0500
Subject: [PATCH 12/18] Clone does not fail on empty repo with no master
 branch.

---
 mixins/high-level.js | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/mixins/high-level.js b/mixins/high-level.js
index 3db4da3..51d588a 100644
--- a/mixins/high-level.js
+++ b/mixins/high-level.js
@@ -28,6 +28,10 @@ function highLevel(repo, uName, uPass, hostName) {
 
   function clone(callback) {
     fetchStream.take(function (err, refs) {
+      if (!refs['refs/heads/master']) {
+        return callback('Repo does not have a master branch');
+      }
+
       fetchStream.put({
         want: refs['refs/heads/master']
       });
@@ -123,7 +127,7 @@ function highLevel(repo, uName, uPass, hostName) {
 
         var repoStructure = {};
         repo.treeWalk(commit.tree, function(err, item) {
-          
+
           function collectFiles(err, object) {
             if (object !== undefined) {
               repoStructure[object.path] = object;
@@ -137,4 +141,4 @@ function highLevel(repo, uName, uPass, hostName) {
       });
     });
   }
-}
\ No newline at end of file
+}

From 166a87943a914e5e722d17ef68dab0199070e521 Mon Sep 17 00:00:00 2001
From: Peter Amstutz <pamstutz@veritasgenetics.com>
Date: Fri, 15 Dec 2017 11:44:51 -0500
Subject: [PATCH 13/18] Create new fetchPackProtocol/sendPackProtocol per
 high-level operation.

---
 mixins/high-level.js | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/mixins/high-level.js b/mixins/high-level.js
index 51d588a..1411997 100644
--- a/mixins/high-level.js
+++ b/mixins/high-level.js
@@ -10,23 +10,22 @@ function highLevel(repo, uName, uPass, hostName) {
 
   require('./mem-db')(repo);
   require('./create-tree')(repo);
-  require('./read-combiner')(repo);
   require('./pack-ops')(repo);
   require('./walkers')(repo);
   require('./formats')(repo);
 
   var httpTransport = require('../net/transport-http')(request);
   var transport = httpTransport(hostName, uName, uPass);
-  var fetchStream = fetchPackProtocol(transport);
-  var pushStream = sendPackProtocol(transport);
 
   repo.clone = clone;
   repo.commit = commit;
   repo.push = push;
   repo.resolveRepo = resolveRepo;
   repo.getContentByHash = getContentByHash;
+  repo.transport = transport;
 
   function clone(callback) {
+    var fetchStream = fetchPackProtocol(this.transport);
     fetchStream.take(function (err, refs) {
       if (!refs['refs/heads/master']) {
         return callback('Repo does not have a master branch');
@@ -44,7 +43,7 @@ function highLevel(repo, uName, uPass, hostName) {
       fetchStream.take(function (err, channels) {
         repo.unpack(channels.pack, {}, function () {
           repo.updateRef('refs/heads/master', refs['refs/heads/master'], function () {
-            return callback('Repo is clonned.');
+            return callback('Repo is cloned.');
           });
         });
       });
@@ -78,8 +77,10 @@ function highLevel(repo, uName, uPass, hostName) {
   }
 
   function push(callback) {
+    var self = this;
     repo.readRef('refs/heads/master', function(err, refHash) {
       repo.loadAs('commit', refHash, function(err, commit) {
+        var pushStream = sendPackProtocol(self.transport);
         pushStream.take(function() {
           pushStream.put({ oldhash: commit.parents[0], newhash: refHash, ref: 'refs/heads/master' });
           pushStream.put(null);

From 48330fe27f18e93eb3537bfc3a966b9cbd5968c8 Mon Sep 17 00:00:00 2001
From: Peter Amstutz <pamstutz@veritasgenetics.com>
Date: Sat, 16 Dec 2017 22:53:01 -0500
Subject: [PATCH 14/18] Fix resolveRepo to collect entire listing before making
 callback.

---
 mixins/high-level.js | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/mixins/high-level.js b/mixins/high-level.js
index 1411997..4f0053d 100644
--- a/mixins/high-level.js
+++ b/mixins/high-level.js
@@ -133,11 +133,12 @@ function highLevel(repo, uName, uPass, hostName) {
             if (object !== undefined) {
               repoStructure[object.path] = object;
               item.read(collectFiles);
+            } else {
+                callback(repoStructure);
             }
           }
 
           item.read(collectFiles);
-          callback(repoStructure);
         });
       });
     });
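
A rough usage sketch of resolveRepo and getContentByHash as they stand after this fix; the example path is illustrative and assumes the tree walk yields paths rooted at "/":

repo.resolveRepo(function (files) {
  // `files` maps each walked path to its tree-walk entry ({ hash, mode, path, ... }),
  // delivered only after the whole listing has been collected.
  Object.keys(files).forEach(function (path) {
    console.log(path, files[path].mode, files[path].hash);
  });

  var entry = files['/test/second.txt'];   // illustrative path
  if (entry) {
    repo.getContentByHash(entry.hash, function (content) {
      console.log(content);                // blob loaded as text
    });
  }
});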

From e09a76e0ed1b99bb754a6efd74c0493c78c68dc0 Mon Sep 17 00:00:00 2001
From: Peter Amstutz <pamstutz@veritasgenetics.com>
Date: Wed, 3 Jan 2018 10:44:42 -0500
Subject: [PATCH 15/18] High-level API supports branches.  Support creating new
 histories with no parent.

---
 mixins/high-level.js | 73 ++++++++++++++++++++++++++++----------------
 1 file changed, 46 insertions(+), 27 deletions(-)

diff --git a/mixins/high-level.js b/mixins/high-level.js
index 4f0053d..ea09c75 100644
--- a/mixins/high-level.js
+++ b/mixins/high-level.js
@@ -1,3 +1,5 @@
+// -*- mode: js; js-indent-level: 2; -*-
+
 "use strict";
 
 var request = require('../net/request-xhr');
@@ -24,15 +26,24 @@ function highLevel(repo, uName, uPass, hostName) {
   repo.getContentByHash = getContentByHash;
   repo.transport = transport;
 
-  function clone(callback) {
+  function remoteRefs(callback) {
+    var fetchStream = fetchPackProtocol(this.transport);
+    fetchStream.take(callback);
+  }
+
+  function clone(branch, callback) {
     var fetchStream = fetchPackProtocol(this.transport);
     fetchStream.take(function (err, refs) {
-      if (!refs['refs/heads/master']) {
-        return callback('Repo does not have a master branch');
+      if (!refs[branch]) {
+	// create empty branch
+        repo.updateRef(branch, "0000000000000000000000000000000000000000", function () {
+          callback('create empty branch '+branch);
+        });
+	  return;
       }
 
       fetchStream.put({
-        want: refs['refs/heads/master']
+        want: refs[branch]
       });
 
       fetchStream.put(null);
@@ -42,7 +53,7 @@ function highLevel(repo, uName, uPass, hostName) {
 
       fetchStream.take(function (err, channels) {
         repo.unpack(channels.pack, {}, function () {
-          repo.updateRef('refs/heads/master', refs['refs/heads/master'], function () {
+          repo.updateRef(branch, refs[branch], function () {
             return callback('Repo is cloned.');
           });
         });
@@ -50,24 +61,24 @@ function highLevel(repo, uName, uPass, hostName) {
     });
   }
 
-  function commit(data, message, callback) {
-    repo.readRef('refs/heads/master', function(err, refHash) {
-      repo.loadAs('commit', refHash, function(err, commit) {
+  function commit(branch, changes, metadata, callback) {
+    repo.readRef(branch, function(err, refHash) {
+      repo.loadAs('commit', refHash, function(err, parentcommit) {
         // Changes to files that already exists
-        data.base = commit.tree;
-        repo.createTree(data, function(err, treeHash) {
-          var commitMessage = {
-            author: {
-                name: commit.author.name,
-                email: commit.author.email
-            },
+        changes.base = parentcommit.tree;
+        repo.createTree(changes, function(err, treeHash) {
+          var commitObj = {
             tree: treeHash,
-            parent: refHash,
-            message: message
+            author: metadata.author,
+            message: metadata.message
           }
 
-          repo.saveAs('commit', commitMessage, function(err, commitHash) {
-            repo.updateRef('refs/heads/master', commitHash, function(err, res) {
+	  if (refHash != "0000000000000000000000000000000000000000") {
+	    commitObj.parent = refHash;
+	  }
+
+          repo.saveAs('commit', commitObj, function(err, commitHash) {
+            repo.updateRef(branch, commitHash, function(err, res) {
               return callback('Commit done.');
             });
           });
@@ -76,13 +87,17 @@ function highLevel(repo, uName, uPass, hostName) {
     });
   }
 
-  function push(callback) {
+  function push(branch, callback) {
     var self = this;
-    repo.readRef('refs/heads/master', function(err, refHash) {
+    repo.readRef(branch, function(err, refHash) {
       repo.loadAs('commit', refHash, function(err, commit) {
         var pushStream = sendPackProtocol(self.transport);
         pushStream.take(function() {
-          pushStream.put({ oldhash: commit.parents[0], newhash: refHash, ref: 'refs/heads/master' });
+	  if (commit.parents[0] === undefined) {
+            pushStream.put({ oldhash: "0000000000000000000000000000000000000000", newhash: refHash, ref: branch });
+	  } else {
+	    pushStream.put({ oldhash: commit.parents[0], newhash: refHash, ref: branch });
+	  }
           pushStream.put(null);
 
           var hashes = [refHash];
@@ -121,14 +136,18 @@ function highLevel(repo, uName, uPass, hostName) {
     })
   }
 
-  function resolveRepo(callback) {
-    repo.readRef('refs/heads/master', function(err, refHash) {
+  function resolveRepo(branch, callback) {
+    repo.readRef(branch, function(err, refHash) {
       repo.loadAs('commit', refHash, function(err, commit) {
-        if (commit === undefined) { return callback(); }
-
         var repoStructure = {};
-        repo.treeWalk(commit.tree, function(err, item) {
+        if (commit === undefined || commit.length === 0) {
+	  repoStructure["/"] = {
+	    body: {}
+	  };
+	  return callback(repoStructure);
+	}
 
+        repo.treeWalk(commit.tree, function(err, item) {
           function collectFiles(err, object) {
             if (object !== undefined) {
               repoStructure[object.path] = object;
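
A rough sketch of the branch-aware API after this patch; the file entry, author details and messages are placeholders, and commit() passes `changes` straight to repo.createTree, so entries use its {path, mode, content} shape:

// Assumes the high-level mixin is already applied to `repo` (see the earlier patches).
var branch = 'refs/heads/master';

repo.clone(branch, function (msg) {
  var changes = [
    { path: '/notes.txt', mode: 33188 /* modes.file */, content: 'hello\n' }
  ];
  var metadata = {
    author: { name: 'A. Developer', email: 'dev@example.com' },
    message: 'Add notes\n'
  };

  repo.commit(branch, changes, metadata, function (msg) {
    repo.push(branch, function (msg) {
      console.log(msg); // 'Push done.'
    });
  });
});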

From 5423d4ecf71dd6b6cadba56ead76141fde307272 Mon Sep 17 00:00:00 2001
From: Peter Amstutz <pamstutz@veritasgenetics.com>
Date: Thu, 4 Jan 2018 10:07:28 -0500
Subject: [PATCH 16/18] Add http-db experiment.

---
 lib/pack-index.js     |  87 +++++++++++++++++++
 mixins/fs-db.js       |  84 +------------------
 mixins/high-level.js  |  14 +++-
 mixins/http-db.js     | 191 ++++++++++++++++++++++++++++++++++++++++++
 net/git-fetch-pack.js |   2 +
 net/request-xhr.js    |  16 +++-
 6 files changed, 305 insertions(+), 89 deletions(-)
 create mode 100644 lib/pack-index.js
 create mode 100644 mixins/http-db.js

diff --git a/lib/pack-index.js b/lib/pack-index.js
new file mode 100644
index 0000000..812391c
--- /dev/null
+++ b/lib/pack-index.js
@@ -0,0 +1,87 @@
+var bodec = require('bodec');
+var sha1 = require('git-sha1');
+
+exports.parseIndex = parseIndex;
+
+function parseIndex(buffer) {
+  if (readUint32(buffer, 0) !== 0xff744f63 ||
+      readUint32(buffer, 4) !== 0x00000002) {
+    throw new Error("Only v2 pack indexes supported");
+  }
+
+  // Get the number of hashes in index
+  // This is the value of the last fan-out entry
+  var hashOffset = 8 + 255 * 4;
+  var length = readUint32(buffer, hashOffset);
+  hashOffset += 4;
+  var crcOffset = hashOffset + 20 * length;
+  var lengthOffset = crcOffset + 4 * length;
+  var largeOffset = lengthOffset + 4 * length;
+  var checkOffset = largeOffset;
+  var indexes = new Array(length);
+  for (var i = 0; i < length; i++) {
+    var start = hashOffset + i * 20;
+    var hash = bodec.toHex(bodec.slice(buffer, start, start + 20));
+    var crc = readUint32(buffer, crcOffset + i * 4);
+    var offset = readUint32(buffer, lengthOffset + i * 4);
+    if (offset & 0x80000000) {
+      offset = largeOffset + (offset &0x7fffffff) * 8;
+      checkOffset = Math.max(checkOffset, offset + 8);
+      offset = readUint64(buffer, offset);
+    }
+    indexes[i] = {
+      hash: hash,
+      offset: offset,
+      crc: crc
+    };
+  }
+  var packChecksum = bodec.toHex(bodec.slice(buffer, checkOffset, checkOffset + 20));
+  var checksum = bodec.toHex(bodec.slice(buffer, checkOffset + 20, checkOffset + 40));
+  if (sha1(bodec.slice(buffer, 0, checkOffset + 20)) !== checksum) {
+    throw new Error("Checksum mistmatch");
+  }
+
+  var byHash = {};
+  indexes.sort(function (a, b) {
+    return a.offset - b.offset;
+  });
+  indexes.forEach(function (data) {
+    byHash[data.hash] = {
+      offset: data.offset,
+      crc: data.crc,
+    };
+  });
+  var offsets = indexes.map(function (entry) {
+    return entry.offset;
+  }).sort(function (a, b) {
+    return a - b;
+  });
+
+  return {
+    offsets: offsets,
+    byHash: byHash,
+    checksum: packChecksum
+  };
+}
+
+function readUint32(buffer, offset) {
+  return (buffer[offset] << 24 |
+          buffer[offset + 1] << 16 |
+          buffer[offset + 2] << 8 |
+          buffer[offset + 3] << 0) >>> 0;
+}
+
+// Yes this will lose precision over 2^53, but that can't be helped when
+// returning a single integer.
+// We simply won't support packfiles over 8 petabytes. I'm ok with that.
+function readUint64(buffer, offset) {
+  var hi = (buffer[offset] << 24 |
+            buffer[offset + 1] << 16 |
+            buffer[offset + 2] << 8 |
+            buffer[offset + 3] << 0) >>> 0;
+  var lo = (buffer[offset + 4] << 24 |
+            buffer[offset + 5] << 16 |
+            buffer[offset + 6] << 8 |
+            buffer[offset + 7] << 0) >>> 0;
+  return hi * 0x100000000 + lo;
+}
diff --git a/mixins/fs-db.js b/mixins/fs-db.js
index 12e1cb0..a344723 100644
--- a/mixins/fs-db.js
+++ b/mixins/fs-db.js
@@ -7,6 +7,7 @@ var parsePackEntry = require('../lib/pack-codec').parseEntry;
 var applyDelta = require('../lib/apply-delta');
 var sha1 = require('git-sha1');
 var pathJoin = require('path').join;
+var parseIndex = require('../lib/pack-index').parseIndex;
 
 // The fs object has the following interface:
 // - readFile(path) => binary
@@ -254,86 +255,3 @@ module.exports = function (repo, fs) {
   }
 
 };
-
-function parseIndex(buffer) {
-  if (readUint32(buffer, 0) !== 0xff744f63 ||
-      readUint32(buffer, 4) !== 0x00000002) {
-    throw new Error("Only v2 pack indexes supported");
-  }
-
-  // Get the number of hashes in index
-  // This is the value of the last fan-out entry
-  var hashOffset = 8 + 255 * 4;
-  var length = readUint32(buffer, hashOffset);
-  hashOffset += 4;
-  var crcOffset = hashOffset + 20 * length;
-  var lengthOffset = crcOffset + 4 * length;
-  var largeOffset = lengthOffset + 4 * length;
-  var checkOffset = largeOffset;
-  var indexes = new Array(length);
-  for (var i = 0; i < length; i++) {
-    var start = hashOffset + i * 20;
-    var hash = bodec.toHex(bodec.slice(buffer, start, start + 20));
-    var crc = readUint32(buffer, crcOffset + i * 4);
-    var offset = readUint32(buffer, lengthOffset + i * 4);
-    if (offset & 0x80000000) {
-      offset = largeOffset + (offset &0x7fffffff) * 8;
-      checkOffset = Math.max(checkOffset, offset + 8);
-      offset = readUint64(buffer, offset);
-    }
-    indexes[i] = {
-      hash: hash,
-      offset: offset,
-      crc: crc
-    };
-  }
-  var packChecksum = bodec.toHex(bodec.slice(buffer, checkOffset, checkOffset + 20));
-  var checksum = bodec.toHex(bodec.slice(buffer, checkOffset + 20, checkOffset + 40));
-  if (sha1(bodec.slice(buffer, 0, checkOffset + 20)) !== checksum) {
-    throw new Error("Checksum mistmatch");
-  }
-
-  var byHash = {};
-  indexes.sort(function (a, b) {
-    return a.offset - b.offset;
-  });
-  indexes.forEach(function (data) {
-    byHash[data.hash] = {
-      offset: data.offset,
-      crc: data.crc,
-    };
-  });
-  var offsets = indexes.map(function (entry) {
-    return entry.offset;
-  }).sort(function (a, b) {
-    return a - b;
-  });
-
-  return {
-    offsets: offsets,
-    byHash: byHash,
-    checksum: packChecksum
-  };
-}
-
-function readUint32(buffer, offset) {
-  return (buffer[offset] << 24 |
-          buffer[offset + 1] << 16 |
-          buffer[offset + 2] << 8 |
-          buffer[offset + 3] << 0) >>> 0;
-}
-
-// Yes this will lose precision over 2^53, but that can't be helped when
-// returning a single integer.
-// We simply won't support packfiles over 8 petabytes. I'm ok with that.
-function readUint64(buffer, offset) {
-  var hi = (buffer[offset] << 24 |
-            buffer[offset + 1] << 16 |
-            buffer[offset + 2] << 8 |
-            buffer[offset + 3] << 0) >>> 0;
-  var lo = (buffer[offset + 4] << 24 |
-            buffer[offset + 5] << 16 |
-            buffer[offset + 6] << 8 |
-            buffer[offset + 7] << 0) >>> 0;
-  return hi * 0x100000000 + lo;
-}
diff --git a/mixins/high-level.js b/mixins/high-level.js
index ea09c75..ac669db 100644
--- a/mixins/high-level.js
+++ b/mixins/high-level.js
@@ -31,7 +31,7 @@ function highLevel(repo, uName, uPass, hostName) {
     fetchStream.take(callback);
   }
 
-  function clone(branch, callback) {
+  function clone(branch, depth, callback) {
     var fetchStream = fetchPackProtocol(this.transport);
     fetchStream.take(function (err, refs) {
       if (!refs[branch]) {
@@ -45,6 +45,11 @@ function highLevel(repo, uName, uPass, hostName) {
       fetchStream.put({
         want: refs[branch]
       });
+      if (depth) {
+	fetchStream.put({
+          deepen: depth
+	});
+      }
 
       fetchStream.put(null);
       fetchStream.put({
@@ -149,11 +154,14 @@ function highLevel(repo, uName, uPass, hostName) {
 
         repo.treeWalk(commit.tree, function(err, item) {
           function collectFiles(err, object) {
-            if (object !== undefined) {
+            if (object !== undefined && !err) {
               repoStructure[object.path] = object;
               item.read(collectFiles);
             } else {
-                callback(repoStructure);
+	      if (err) {
+		console.log(err);
+	      }
+              callback(repoStructure);
             }
           }
 
diff --git a/mixins/http-db.js b/mixins/http-db.js
new file mode 100644
index 0000000..ccfc05f
--- /dev/null
+++ b/mixins/http-db.js
@@ -0,0 +1,191 @@
+// -*- mode: js; js-indent-level: 2; -*-
+
+// Get refs:
+// $ curl -H "Authorization: Basic bm9uZToxajM4OGRtZDhsZnRvamJuazI3enN0b3BrYXQ2bHZtcXU3dDYwOWh3cmdhdmt4N3Zkbw==" http://172.17.0.2:9001/2tlax-s0uqq-u6kz3a8x06tczjb.git/info/refs
+// 51779a651e7125f07b537cd1785bae642996f1f9	refs/heads/master
+//
+// Get object:
+// $ curl -O -H "Authorization: Basic bm9uZToxajM4OGRtZDhsZnRvamJuazI3enN0b3BrYXQ2bHZtcXU3dDYwOWh3cmdhdmt4N3Zkbw==" http://172.17.0.2:9001/a/a.git/objects/51/779a651e7125f07b537cd1785bae642996f1f9
+//
+// Get packs:
+// curl -H "Authorization: Basic bm9uZToxajM4OGRtZDhsZnRvamJuazI3enN0b3BrYXQ2bHZtcXU3dDYwOWh3cmdhdmt4N3Zkbw==" http://172.17.0.2:9001/2tlax-s0uqq-a997dxaw11u0lyr.git/objects/info/packs
+//
+// P pack-4cb362a32ab3424490c7c3dfe28dc69e4016459c.pack
+
+var request = require('../net/request-xhr');
+var inflate = require('../lib/inflate');
+var codec = require('../lib/object-codec.js');
+var sha1 = require('git-sha1');
+var parseIndex = require('../lib/pack-index').parseIndex;
+var parsePackEntry = require('../lib/pack-codec').parseEntry;
+var applyDelta = require('../lib/apply-delta');
+
+module.exports = mixin;
+var isHash = /^[0-9a-f]{40}$/;
+
+function mixin(repo, username, password, hostName) {
+  var cachedIndexes = {};
+  var headers = {};
+  if (username) {
+    headers.Authorization = "Basic " + btoa(username + ":" + (password || ""));
+  }
+
+  repo.readRef = readRef;
+  repo.listRefs = listRefs;
+
+  repo.loadAs = loadAs;
+  repo.loadRaw = loadRaw;
+
+  repo.hasHash = hasHash;
+
+  function readRef(ref, callback) {
+    return listRefs(null, function(err, out) {
+      console.log("out "+ref);
+      console.log(out);
+      callback(err, out[ref]);
+    });
+  }
+
+  function listRefs(prefix, callback) {
+    return request("GET", hostName+"/info/refs", headers, null, function (err, response) {
+      if (response.statusCode != 200) {
+	return callback("Error code " + response.statusCode, null);
+      }
+      var refs = {};
+      if (response.body) {
+	var regex = prefix && new RegExp("^" + prefix + "[/$]");
+	var sp = response.body.split("\n");
+	for (var i in sp) {
+	  var m = sp[i].match(/^([0-9a-f]{40})\t(.*)$/);
+	  if (m) {
+            if (regex && !regex.test(m[2])) continue;
+	    refs[m[2]] = m[1];
+	  }
+	}
+      }
+      console.log(refs);
+      callback(err, refs);
+    }, "text");
+  }
+
+  function hasHash(hash, callback) {
+    return loadRaw(hash, function (err, body) {
+      if (err) return callback(err);
+      return callback(null, !!body);
+    });
+  }
+
+  function loadAs(type, hash, callback) {
+    return loadRaw(hash, function(err, buffer) {
+      if (!buffer) return callback(err);
+      var obj = codec.deframe(buffer, true);
+      if (obj.type !== type) throw new TypeError("Type mismatch " + obj.type + "!==" + type);
+      callback(err, obj.body);
+    });
+  }
+
+  function loadRaw(hash, callback) {
+    return request("GET", hostName+"/objects/"+hash.substr(0, 2)+"/"+hash.substr(2), headers, null, function(err, response) {
+      if (response.statusCode == 200) {
+	var raw;
+	try { raw = inflate(response.body); }
+	catch (err) { return callback(err); }
+	return callback(err, raw);
+      }
+      return loadRawPacked(hash, callback);
+    }, "arraybuffer");
+  }
+
+  function loadRawPacked(hash, callback) {
+    var packHashes = [];
+    return request("GET", hostName+"/objects/info/packs", headers, null, function(err, response) {
+      if (!response.body) return callback(err);
+      response.body.split("\n").forEach(function (line) {
+        var match = line.match(/P pack-([0-9a-f]{40}).pack/);
+        if (match) packHashes.push(match[1]);
+      });
+      start();
+    }, "text");
+
+    function start() {
+      var packHash = packHashes.pop();
+      var offsets;
+      if (!packHash) return callback();
+      if (!cachedIndexes[packHash]) loadIndex(packHash);
+      else onIndex();
+
+      function loadIndex() {
+	return request("GET", hostName+"/objects/pack/pack-" + packHash + ".idx", headers, null, function(err, response) {
+	  var buffer = response.body;
+          if (!buffer) return callback(err);
+	  console.log("Looking at index");
+          try {
+            cachedIndexes[packHash] = parseIndex(buffer);
+          }
+          catch (err) {
+	    console.log("failure " +err);
+	    return callback(err); }
+	  console.log("cachedIndexes");
+	  console.log(cachedIndexes);
+          onIndex();
+        });
+      }
+
+      function onIndex() {
+        var cached = cachedIndexes[packHash];
+        var packFile = hostName+"/objects/pack/pack-" + packHash + ".pack";
+        var index = cached.byHash[hash];
+	console.log("looking for "+hash+" in "+packHash+" index");
+	console.log(index);
+        if (!index) return start();
+        offsets = cached.offsets;
+        loadChunk(packFile, index.offset, callback);
+      }
+
+      function loadChunk(packFile, start, callback) {
+        var index = offsets.indexOf(start);
+        if (index < 0) {
+          var error = new Error("Can't find chunk starting at " + start);
+          return callback(error);
+        }
+        var end = index + 1 < offsets.length ? offsets[index + 1] : -20;
+	// FIXME git http-backend doesn't actually support Range requests,
+	// so this doesn't work.  Will need to download the whole packfile.
+	var headerWithRange = {Authorization: headers.Authorization, Range: "bytes="+start+"-"+end};
+	console.log("loading chunk "+packFile);
+	console.log(headerWithRange);
+	return request("GET", packFile, headerWithRange, null, function(err, response) {
+	  var chunk = response.body;
+          if (!chunk) return callback(err);
+          var raw;
+          try {
+            var entry = parsePackEntry(chunk);
+            if (entry.type === "ref-delta") {
+              return loadRaw.call(repo, entry.ref, onBase);
+            }
+            else if (entry.type === "ofs-delta") {
+              return loadChunk(packFile, start - entry.ref, onBase);
+            }
+            raw = codec.frame(entry);
+          }
+          catch (err) { return callback(err); }
+          callback(null, raw);
+
+          function onBase(err, base) {
+            if (!base) return callback(err);
+            var object = codec.deframe(base);
+            var buffer;
+            try {
+              object.body = applyDelta(entry.body, object.body);
+              buffer = codec.frame(object);
+            }
+            catch (err) { return callback(err); }
+            callback(null, buffer);
+          }
+        });
+      }
+
+    }
+  }
+
+}
diff --git a/net/git-fetch-pack.js b/net/git-fetch-pack.js
index aee8347..34ab053 100644
--- a/net/git-fetch-pack.js
+++ b/net/git-fetch-pack.js
@@ -1,3 +1,5 @@
+// -*- mode: js; js-indent-level: 2; -*-
+
 "use strict";
 
 var makeChannel = require('culvert');
diff --git a/net/request-xhr.js b/net/request-xhr.js
index 5bf9064..2f00ae3 100644
--- a/net/request-xhr.js
+++ b/net/request-xhr.js
@@ -1,8 +1,10 @@
+// -*- mode: js; js-indent-level: 2; -*-
+
 "use strict";
 
 module.exports = request;
 
-function request(method, url, headers, body, callback) {
+function request(method, url, headers, body, callback, responseType) {
   if (typeof body === "function") {
     callback = body;
     body = undefined;
@@ -12,7 +14,10 @@ function request(method, url, headers, body, callback) {
   }
   var xhr = new XMLHttpRequest();
   xhr.open(method, url, true);
-  xhr.responseType = "arraybuffer";
+  if (!responseType) {
+    responseType = "arraybuffer";
+  }
+  xhr.responseType = responseType;
 
   Object.keys(headers).forEach(function (name) {
     xhr.setRequestHeader(name, headers[name]);
@@ -26,10 +31,15 @@ function request(method, url, headers, body, callback) {
       resHeaders[line.substring(0, index).toLowerCase()] = line.substring(index + 1).trim();
     });
 
+    var body = xhr.response;
+    if (body && xhr.responseType == "arraybuffer") {
+      body = new Uint8Array(body);
+    }
+
     callback(null, {
       statusCode: xhr.status,
       headers: resHeaders,
-      body: xhr.response && new Uint8Array(xhr.response)
+      body: body
     });
   };
   xhr.send(body);

From 6f83c2b0f56c32bf650114e04a7bd470724cdd34 Mon Sep 17 00:00:00 2001
From: Peter Amstutz <pamstutz@veritasgenetics.com>
Date: Thu, 25 Jan 2018 20:51:18 -0500
Subject: [PATCH 17/18] * On error, callback with error object.

* clone() updates repo to latest upstream ref.
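
A hedged caller-side sketch of the updated clone API (assumes the mixin
exposes clone on the repo object the same way it exposes transport;
branch and depth values below are illustrative):

  repo.clone("refs/heads/master", 1, function (message) {
    // A truthy depth sends a "deepen" request for a shallow fetch;
    // transport errors now arrive here instead of being thrown, and on
    // success the message reads "Repo is cloned to <hash>".
    console.log(message);
  });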
---
 mixins/high-level.js | 32 ++++++++++++++++++++------------
 1 file changed, 20 insertions(+), 12 deletions(-)

diff --git a/mixins/high-level.js b/mixins/high-level.js
index ac669db..2233b31 100644
--- a/mixins/high-level.js
+++ b/mixins/high-level.js
@@ -27,12 +27,12 @@ function highLevel(repo, uName, uPass, hostName) {
   repo.transport = transport;
 
   function remoteRefs(callback) {
-    var fetchStream = fetchPackProtocol(this.transport);
+    var fetchStream = fetchPackProtocol(this.transport, callback);
     fetchStream.take(callback);
   }
 
   function clone(branch, depth, callback) {
-    var fetchStream = fetchPackProtocol(this.transport);
+    var fetchStream = fetchPackProtocol(this.transport, callback);
     fetchStream.take(function (err, refs) {
       if (!refs[branch]) {
 	// create empty branch
@@ -50,18 +50,26 @@ function highLevel(repo, uName, uPass, hostName) {
           deepen: depth
 	});
       }
-
       fetchStream.put(null);
-      fetchStream.put({
-        done: true
-      });
 
-      fetchStream.take(function (err, channels) {
-        repo.unpack(channels.pack, {}, function () {
-          repo.updateRef(branch, refs[branch], function () {
-            return callback('Repo is cloned.');
+      repo.listRefs(false, function (err, haveRefs) {
+	Object.values(haveRefs).forEach(function (refhash) {
+	  fetchStream.put({
+	    have: refhash
+	  });
+	});
+
+	fetchStream.put({
+          done: true
+	});
+
+	fetchStream.take(function (err, channels) {
+          repo.unpack(channels.pack, {}, function () {
+            repo.updateRef(branch, refs[branch], function () {
+              return callback('Repo is cloned to '+refs[branch]);
+            });
           });
-        });
+	});
       });
     });
   }
@@ -96,7 +104,7 @@ function highLevel(repo, uName, uPass, hostName) {
     var self = this;
     repo.readRef(branch, function(err, refHash) {
       repo.loadAs('commit', refHash, function(err, commit) {
-        var pushStream = sendPackProtocol(self.transport);
+        var pushStream = sendPackProtocol(self.transport, callback);
         pushStream.take(function() {
 	  if (commit.parents[0] === undefined) {
             pushStream.put({ oldhash: "0000000000000000000000000000000000000000", newhash: refHash, ref: branch });

From d9314f13f8c71ad3b97c5cef6ec9b5108ed304cb Mon Sep 17 00:00:00 2001
From: Peter Amstutz <pamstutz@veritasgenetics.com>
Date: Thu, 25 Jan 2018 22:09:52 -0500
Subject: [PATCH 18/18] Propagate push errors up.
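
A hedged sketch of what the caller now sees (assumes the high-level
mixin exposes this flow as a push-style method taking (branch, callback),
as suggested by the hunk below; the exact method name is not shown in
this patch):

  repo.push("refs/heads/master", function (progressOrErr) {
    if (progressOrErr === null) {
      console.log("push complete");  // server closed the channel cleanly
    } else {
      console.log(progressOrErr);    // side-band progress line or an error
    }
  });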

---
 mixins/high-level.js  | 12 +++++++++++-
 net/git-send-pack.js  | 33 ++++++++++++++++++---------------
 net/transport-http.js | 23 ++++++++++++-----------
 3 files changed, 41 insertions(+), 27 deletions(-)

diff --git a/mixins/high-level.js b/mixins/high-level.js
index 2233b31..79661fc 100644
--- a/mixins/high-level.js
+++ b/mixins/high-level.js
@@ -127,7 +127,17 @@ function highLevel(repo, uName, uPass, hostName) {
                       stream.take(putHashes);
                     } else {
                       pushStream.put({flush: true});
-                      return callback('Push done.');
+		      var takedone = function(_, response) {
+			if (response && response.progress) {
+			  callback(response.progress);
+			}
+			if (response === null) {
+			  return callback(null);
+			} else {
+			  pushStream.take(takedone);
+			}
+		      }
+		      pushStream.take(takedone);
                     }
                   }
 
diff --git a/net/git-send-pack.js b/net/git-send-pack.js
index 7bf533c..e1455e1 100644
--- a/net/git-send-pack.js
+++ b/net/git-send-pack.js
@@ -1,3 +1,4 @@
+// -*- mode: js; js-indent-level: 2; -*-
 "use strict";
 
 var makeChannel = require('culvert');
@@ -90,17 +91,17 @@ function sendPack(transport, onError) {
     if (line.oldhash) {
       var extra = "";
       if (!capsSent) {
-          capsSent = true;
-          var caplist = [];
-          if (caps["ofs-delta"]) caplist.push("ofs-delta");
-          if (caps["thin-pack"]) caplist.push("thin-pack");
-          // if (caps["multi_ack_detailed"]) extra += " multi_ack_detailed";
-          // else if (caps["multi_ack"]) extra +=" multi_ack";
-          if (caps["side-band-64k"]) caplist.push("side-band-64k");
-          else if (caps["side-band"]) caplist.push("side-band");
-          // if (caps["agent"]) extra += " agent=" + agent;
-          if (caps.agent) extra += caplist.push("agent=" + caps.agent);
-          extra = "\0" + caplist.join(" ");
+        capsSent = true;
+        var caplist = [];
+        if (caps["ofs-delta"]) caplist.push("ofs-delta");
+        if (caps["thin-pack"]) caplist.push("thin-pack");
+        // if (caps["multi_ack_detailed"]) extra += " multi_ack_detailed";
+        // else if (caps["multi_ack"]) extra +=" multi_ack";
+        if (caps["side-band-64k"]) caplist.push("side-band-64k");
+        else if (caps["side-band"]) caplist.push("side-band");
+        // if (caps["agent"]) extra += " agent=" + agent;
+        if (caps.agent) caplist.push("agent=" + caps.agent);
+        extra = "\0" + caplist.join(" ");
       }
       extra += "\n";
       socket.put(line.oldhash + " " + line.newhash + " " + line.ref + extra);
@@ -112,9 +113,11 @@ function sendPack(transport, onError) {
   function onPack(_, line) {
     if (line.flush) {
       socket.put(line);
-      socket.take(function (_, h) {
-        api.put(h);
-      });
+      var fwd = function(_, b) {
+	api.put(b);
+	socket.take(fwd);
+      }
+      socket.take(fwd);
     } else {
       socket.put({
         noframe: line
@@ -136,4 +139,4 @@ function throwIt(err) {
     throw err;
   });
   // throw err;
-}
\ No newline at end of file
+}
diff --git a/net/transport-http.js b/net/transport-http.js
index d8c89c2..493317b 100644
--- a/net/transport-http.js
+++ b/net/transport-http.js
@@ -1,3 +1,4 @@
+// -*- mode: js; js-indent-level: 2; -*-
 "use strict";
 
 var makeChannel = require('culvert');
@@ -80,18 +81,18 @@ module.exports = function (request) {
       }
 
       function onWrite(item) {
-          if (item === undefined) return socket.put();
-          if (item === null || !item.flush) {
-              if (item !== null && item.noframe !== undefined) {
-                  bodyParts.push(item.noframe);
-              } else {
-                  bodyWrite(item);
-              }
-          }
-          socket.take(onWrite);
-          if (item === null || (!item.flush)) {
-              if ((item !== "done\n" || !bodyParts.length) ) return;
+        if (item === undefined) return socket.put();
+        if (item === null || !item.flush) {
+          if (item !== null && item.noframe !== undefined) {
+            bodyParts.push(item.noframe);
+          } else {
+            bodyWrite(item);
           }
+        }
+        socket.take(onWrite);
+        if (item === null || (!item.flush)) {
+          if ((item !== "done\n" || !bodyParts.length) ) return;
+        }
         var body = bodec.join(bodyParts);
         bodyParts.length = 0;
         request("POST", gitUrl + "/" + serviceName, headers, body, onResult);