commit b817d5f061360f4812cdeb760281863ff4ddb5b6
Author: Michael Buch <[email protected]>
Date:   Sun May 20 01:43:25 2018 +0100

    Handle multiple URLs on single line scenario
    
    When cycling through URLs, instead of picking the first
    URL of each line, cycle through URLs on a single line
    from back to front
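    
    A minimal standalone sketch (not part of the patch) of the
    back-to-front matching idea: repeatedly take the last occurrence
    that starts before the previous match. The name findlast_before is
    illustrative only, not the patch's actual findlast/copyurl code,
    and the line text is assumed to be a plain NUL-terminated C string:
    
        #include <stdio.h>
        #include <string.h>
        
        /* Return the last occurrence of `find` in `str` that starts
         * strictly before offset `before`, or NULL if there is none. */
        static const char *
        findlast_before(const char *str, const char *find, size_t before)
        {
                size_t len = strlen(find);
                size_t end = strlen(str);
        
                if (before < end)
                        end = before;
                for (size_t i = end; i > 0; i--)
                        if (strncmp(str + i - 1, find, len) == 0)
                                return str + i - 1;
                return NULL;
        }
        
        int
        main(void)
        {
                const char line[] = "see http://a.example and http://b.example here";
                size_t col = strlen(line);
                const char *m;
        
                /* Repeated calls walk the line from back to front,
                 * which is how the patch cycles through several URLs
                 * that share one line: first column 25, then column 4. */
                while ((m = findlast_before(line, "http", col)) != NULL) {
                        printf("match at column %td\n", m - line);
                        col = m - line; /* next search stops before this match */
                }
                return 0;
        }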

diff --git a/st.suckless.org/patches/copyurl/index.md b/st.suckless.org/patches/copyurl/index.md
index c26ba4a2..b078e827 100644
--- a/st.suckless.org/patches/copyurl/index.md
+++ b/st.suckless.org/patches/copyurl/index.md
@@ -25,6 +25,7 @@ Download
 Following patches also highlight the selected urls:
 
  * [st-copyurl-20180514-a7bd977.diff](st-copyurl-20180514-a7bd977.diff)
+ * [st-copyurl-20180520-53d59ef.diff](st-copyurl-20180520-53d59ef.diff)
 
 Authors
 -------
diff --git a/st.suckless.org/patches/copyurl/st-copyurl-20180520-53d59ef.diff b/st.suckless.org/patches/copyurl/st-copyurl-20180520-53d59ef.diff
new file mode 100644
index 00000000..4a2e7e1b
--- /dev/null
+++ b/st.suckless.org/patches/copyurl/st-copyurl-20180520-53d59ef.diff
@@ -0,0 +1,179 @@
+From 53d59ef83576afface82889e21bf86ed9c288496 Mon Sep 17 00:00:00 2001
+From: Michael Buch <[email protected]>
+Date: Sun, 20 May 2018 01:35:03 +0100
+Subject: [PATCH] Handle multiple URLs on single line scenario
+
+When cycling through URLs, instead of picking the first
+URL of each line, cycle through URLs on a single line
+from back to front
+---
+ config.def.h |   1 +
+ st.c         | 116 +++++++++++++++++++++++++++++++++++++++++++++++++++
+ st.h         |   1 +
+ 3 files changed, 118 insertions(+)
+
+diff --git a/config.def.h b/config.def.h
+index 82b1b09..cbe923e 100644
+--- a/config.def.h
++++ b/config.def.h
+@@ -178,6 +178,7 @@ static Shortcut shortcuts[] = {
+       { TERMMOD,              XK_Y,           selpaste,       {.i =  0} },
+       { TERMMOD,              XK_Num_Lock,    numlock,        {.i =  0} },
+       { TERMMOD,              XK_I,           iso14755,       {.i =  0} },
++      { MODKEY,               XK_l,           copyurl,        {.i =  0} },
+ };
+ 
+ /*
+diff --git a/st.c b/st.c
+index 0628707..309aa89 100644
+--- a/st.c
++++ b/st.c
+@@ -204,6 +204,7 @@ static void tdefutf8(char);
+ static int32_t tdefcolor(int *, int *, int);
+ static void tdeftran(char);
+ static void tstrsequence(uchar);
++static void tsetcolor(int, int, int, uint32_t, uint32_t);
+ 
+ static void drawregion(int, int, int, int);
+ 
+@@ -1600,6 +1601,17 @@ tsetmode(int priv, int set, int *args, int narg)
+       }
+ }
+ 
++void
++tsetcolor( int row, int start, int end, uint32_t fg, uint32_t bg )
++{
++      int i = start;
++      for( ; i < end; ++i )
++      {
++              term.line[row][i].fg = fg;
++              term.line[row][i].bg = bg;
++      }
++}
++
+ void
+ csihandle(void)
+ {
+@@ -2617,3 +2629,107 @@ redraw(void)
+       tfulldirt();
+       draw();
+ }
++
++char *
++findlast(char *str, const char* find)
++{
++      char* found = NULL;
++      for(found = str + strlen(str) - 1; found >= str; --found) {
++              if(strncmp(found, find, strlen(find)) == 0) {
++                      return found;
++              }
++      }
++
++      return NULL;
++}
++
++/*
++** Select and copy the previous url on screen (do nothing if there's no url).
++**
++** FIXME: doesn't handle urls that span multiple lines; will need to add support
++**        for multiline "getsel()" first
++*/
++void
++copyurl(const Arg *arg) {
++      /* () and [] can appear in urls, but excluding them here will reduce false
++       * positives when figuring out where a given url ends.
++       */
++      static char URLCHARS[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
++              "abcdefghijklmnopqrstuvwxyz"
++              "0123456789-._~:/?#@!$&'*+,;=%";
++
++      /* remove highlighting from previous selection if any */
++      if(sel.ob.x >= 0 && sel.oe.x >= 0)
++              tsetcolor(sel.nb.y, sel.ob.x, sel.oe.x + 1, defaultfg, defaultbg);
++
++      int i = 0,
++              row = 0,
++              col = 0,
++              startrow = 0,
++              colend = 0;
++
++      char *linestr = calloc(sizeof(char), term.col+1); /* assume ascii */
++      char *c = NULL,
++               *match = NULL;
++
++      row = (sel.ob.x >= 0 && sel.nb.y > 0) ? sel.nb.y : term.bot;
++      LIMIT(row, term.top, term.bot);
++      startrow = row;
++
++      colend = (sel.ob.x >= 0 && sel.nb.y > 0) ? sel.nb.x : term.col;
++      LIMIT(colend, 0, term.col);
++
++      /*
++      ** Scan from (term.bot,term.col) to (0,0) and find
++      ** next occurrence of a URL
++      */
++      do {
++              /* Read in each column of every row until
++              ** we hit previous occurrence of URL
++              */
++              for (col = 0, i = 0; col < colend; ++col,++i) {
++                      /* assume ascii */
++                      if (term.line[row][col].u > 127)
++                              continue;
++                      linestr[i] = term.line[row][col].u;
++              }
++              linestr[term.col] = '\0';
