commit 14b34c57489a935c25d1bf14c9c3a98850d9e428
Author: Santtu Lakkala <[email protected]>
Date:   Thu Feb 17 18:01:38 2022 +0200

    [st][patch][copyurl] Updated version of the patch
    
    Slightly simplified code and fixed issues when mixed with selection.

diff --git a/st.suckless.org/patches/copyurl/index.md b/st.suckless.org/patches/copyurl/index.md
index 4e1586e5..d7bf989c 100644
--- a/st.suckless.org/patches/copyurl/index.md
+++ b/st.suckless.org/patches/copyurl/index.md
@@ -22,6 +22,7 @@ Following patches also highlight the selected urls:
 
 * [st-copyurl-20190202-3be4cf1.diff](st-copyurl-20190202-3be4cf1.diff)
 * [st-copyurl-0.8.4.diff](st-copyurl-0.8.4.diff)
+* [st-copyurl-20220217-0.8.5.diff](st-copyurl-20220217-0.8.5.diff)
 
 Authors
 -------
diff --git a/st.suckless.org/patches/copyurl/st-copyurl-20220217-0.8.5.diff b/st.suckless.org/patches/copyurl/st-copyurl-20220217-0.8.5.diff
new file mode 100644
index 00000000..de136792
--- /dev/null
+++ b/st.suckless.org/patches/copyurl/st-copyurl-20220217-0.8.5.diff
@@ -0,0 +1,136 @@
+From eacbcf7de84e81a27759d086ad1a5ea9e41f9894 Mon Sep 17 00:00:00 2001
+From: Santtu Lakkala <[email protected]>
+Date: Wed, 16 Feb 2022 20:34:20 +0200
+Subject: [PATCH] Loop through urls on screen and copy to clipboard
+
+Based on the previous highlighting patches, slightly simplified and
+fixes graphical issues with mixed copyurl and selection.
+---
+ config.def.h |  1 +
+ st.c         | 81 ++++++++++++++++++++++++++++++++++++++++++++++++++++
+ st.h         |  1 +
+ 3 files changed, 83 insertions(+)
+
+diff --git a/config.def.h b/config.def.h
+index 91ab8ca..3f365c7 100644
+--- a/config.def.h
++++ b/config.def.h
+@@ -201,6 +201,7 @@ static Shortcut shortcuts[] = {
+       { TERMMOD,              XK_Y,           selpaste,       {.i =  0} },
+       { ShiftMask,            XK_Insert,      selpaste,       {.i =  0} },
+       { TERMMOD,              XK_Num_Lock,    numlock,        {.i =  0} },
++      { MODKEY,               XK_l,           copyurl,        {.i =  0} },
+ };
+ 
+ /*
+diff --git a/st.c b/st.c
+index 51049ba..931b29f 100644
+--- a/st.c
++++ b/st.c
+@@ -200,6 +200,7 @@ static void tdefutf8(char);
+ static int32_t tdefcolor(const int *, int *, int);
+ static void tdeftran(char);
+ static void tstrsequence(uchar);
++static const char *findlastany(const char *, const char**, size_t);
+ 
+ static void drawregion(int, int, int, int);
+ 
+@@ -2688,3 +2689,83 @@ redraw(void)
+       tfulldirt();
+       draw();
+ }
++
++const char *
++findlastany(const char *str, const char**find, size_t len)
++{
++      const char *found = NULL;
++      int i = 0;
++
++      for (found = str + strlen(str) - 1; found >= str; --found) {
++              for(i = 0; i < len; i++) {
++                      if (strncmp(found, find[i], strlen(find[i])) == 0) {
++                              return found;
++                      }
++              }
++      }
++
++      return NULL;
++}
++
++/*
++** Select and copy the previous url on screen (do nothing if there's no url).
++**
++** FIXME: doesn't handle urls that span multiple lines; will need to add support
++**        for multiline "getsel()" first
++*/
++void
++copyurl(const Arg *arg) {
++      /* () and [] can appear in urls, but excluding them here will reduce false
++       * positives when figuring out where a given url ends.
++       */
++      static const char URLCHARS[] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
++              "abcdefghijklmnopqrstuvwxyz"
++              "0123456789-._~:/?#@!$&'*+,;=%";
++
++      static const char* URLSTRINGS[] = {"http://", "https://"};
++
++      int row = 0, /* row of current URL */
++              col = 0, /* column of current URL start */
++              colend = 0, /* column of last occurrence */
++              passes = 0; /* how many rows have been scanned */
++
++      char linestr[term.col + 1];
++      const char *c = NULL,
++               *match = NULL;
++
++      row = (sel.ob.x >= 0 && sel.nb.y > 0) ? sel.nb.y : term.bot;
++      LIMIT(row, term.top, term.bot);
++
++      colend = (sel.ob.x >= 0 && sel.nb.y > 0) ? sel.nb.x : term.col;
++      LIMIT(colend, 0, term.col);
++
++      /*
++      ** Scan from (term.row - 1,term.col - 1) to (0,0) and find
++      ** next occurrance of a URL
++      */
++      for (passes = 0; passes < term.row; passes++) {
++              /* Read in each column of every row until
++              ** we hit previous occurrence of URL
++              */
++              for (col = 0; col < colend; ++col)
++                      linestr[col] = term.line[row][col].u < 128 ? term.line[row][col].u : ' ';
++              linestr[col] = '\0';
++
++              if ((match = findlastany(linestr, URLSTRINGS,
++                                              sizeof(URLSTRINGS)/sizeof(URLSTRINGS[0]))))
++                      break;
++
++              if (--row < 0)
++                      row = term.row - 1;
++
++              colend = term.col;
++      };
++
++      if (match) {
++              size_t l = strspn(match, URLCHARS);
++              selstart(match - linestr, row, 1);
++              selextend(match - linestr + l - 1, row, 1, SEL_REGULAR);
++              xsetsel(getsel());
++              xclipcopy();
++      }
++}
+diff --git a/st.h b/st.h
+index 519b9bd..0458005 100644
+--- a/st.h
++++ b/st.h
+@@ -85,6 +85,7 @@ void printscreen(const Arg *);
+ void printsel(const Arg *);
+ void sendbreak(const Arg *);
+ void toggleprinter(const Arg *);
++void copyurl(const Arg *);
+ 
+ int tattrset(int);
+ void tnew(int, int);
+-- 
+2.32.0
+


Reply via email to